From 1dd3124a9770e11b6684e5dd1e6bc15a0aa3bc67 Mon Sep 17 00:00:00 2001
From: "Matthäus G. Chajdas"
Date: Sun, 10 Nov 2019 13:56:53 +0100
Subject: Remove all files, redirect to GitHub.

---
 AUTHORS | 228 - CHANGES | 1325 ------ LICENSE | 25 - MANIFEST.in | 6 - Makefile | 71 - README | 3 + README.rst | 39 - TODO | 12 - bitbucket-pipelines.yml | 34 - doc/Makefile | 153 - doc/_static/favicon.ico | Bin 16958 -> 0 bytes doc/_static/logo_new.png | Bin 40944 -> 0 bytes doc/_static/logo_only.png | Bin 16424 -> 0 bytes doc/_templates/docssidebar.html | 3 - doc/_templates/indexsidebar.html | 25 - doc/_themes/pygments14/layout.html | 98 - doc/_themes/pygments14/static/bodybg.png | Bin 51903 -> 0 bytes doc/_themes/pygments14/static/docbg.png | Bin 61296 -> 0 bytes doc/_themes/pygments14/static/listitem.png | Bin 207 -> 0 bytes doc/_themes/pygments14/static/logo.png | Bin 26933 -> 0 bytes doc/_themes/pygments14/static/pocoo.png | Bin 2154 -> 0 bytes doc/_themes/pygments14/static/pygments14.css_t | 401 -- doc/_themes/pygments14/theme.conf | 15 - doc/conf.py | 241 - doc/docs/api.rst | 354 -- doc/docs/authors.rst | 4 - doc/docs/changelog.rst | 1 - doc/docs/cmdline.rst | 166 - doc/docs/filterdevelopment.rst | 71 - doc/docs/filters.rst | 41 - doc/docs/formatterdevelopment.rst | 169 - doc/docs/formatters.rst | 48 - doc/docs/index.rst | 66 - doc/docs/integrate.rst | 40 - doc/docs/java.rst | 70 - doc/docs/lexerdevelopment.rst | 728 --- doc/docs/lexers.rst | 69 - doc/docs/moinmoin.rst | 39 - doc/docs/plugins.rst | 93 - doc/docs/quickstart.rst | 205 - doc/docs/rstdirective.rst | 22 - doc/docs/styles.rst | 232 - doc/docs/tokens.rst | 372 -- doc/docs/unicode.rst | 58 - doc/download.rst | 41 - doc/faq.rst | 139 - doc/index.rst | 54 - doc/languages.rst | 170 - doc/make.bat | 190 - doc/pygmentize.1 | 94 - external/autopygmentize | 101 - external/lasso-builtins-generator-9.lasso | 162 - external/markdown-processor.py | 67 - external/moin-parser.py | 112 - external/pygments.bashcomp | 38 - external/rst-directive.py | 82 - pygmentize | 8 - pygments/__init__.py | 90 - pygments/cmdline.py | 573 --- pygments/console.py | 71 - pygments/filter.py | 74 - pygments/filters/__init__.py | 350 -- pygments/formatter.py | 95 - pygments/formatters/__init__.py | 154 - pygments/formatters/_mapping.py | 85 - pygments/formatters/bbcode.py | 109 - pygments/formatters/html.py | 880 ---- pygments/formatters/img.py | 600 --- pygments/formatters/irc.py | 182 - pygments/formatters/latex.py | 482 -- pygments/formatters/other.py | 160 - pygments/formatters/rtf.py | 147 - pygments/formatters/svg.py | 153 - pygments/formatters/terminal.py | 136 - pygments/formatters/terminal256.py | 315 -- pygments/lexer.py | 881 ---- pygments/lexers/__init__.py | 337 -- pygments/lexers/_asy_builtins.py | 1645 ------- pygments/lexers/_cl_builtins.py | 232 - pygments/lexers/_cocoa_builtins.py | 73 - pygments/lexers/_csound_builtins.py | 1660 ------- pygments/lexers/_lasso_builtins.py | 5327 ---------------------- pygments/lexers/_lua_builtins.py | 295 -- pygments/lexers/_mapping.py | 524 --- pygments/lexers/_mql_builtins.py | 1172 ----- pygments/lexers/_openedge_builtins.py | 2547 ----------- pygments/lexers/_php_builtins.py | 4756 ------------------- pygments/lexers/_postgres_builtins.py | 621 --- pygments/lexers/_scilab_builtins.py | 3094 ------------- pygments/lexers/_sourcemod_builtins.py | 1163 ----- pygments/lexers/_stan_builtins.py | 558 --- pygments/lexers/_stata_builtins.py | 421 -- pygments/lexers/_tsql_builtins.py | 1004 ----
pygments/lexers/_vbscript_builtins.py | 280 -- pygments/lexers/_vim_builtins.py | 1939 -------- pygments/lexers/actionscript.py | 240 - pygments/lexers/agile.py | 24 - pygments/lexers/algebra.py | 221 - pygments/lexers/ambient.py | 76 - pygments/lexers/ampl.py | 87 - pygments/lexers/apl.py | 101 - pygments/lexers/archetype.py | 318 -- pygments/lexers/asm.py | 774 ---- pygments/lexers/automation.py | 374 -- pygments/lexers/basic.py | 659 --- pygments/lexers/bibtex.py | 160 - pygments/lexers/boa.py | 102 - pygments/lexers/business.py | 612 --- pygments/lexers/c_cpp.py | 252 - pygments/lexers/c_like.py | 571 --- pygments/lexers/capnproto.py | 78 - pygments/lexers/chapel.py | 112 - pygments/lexers/clean.py | 178 - pygments/lexers/compiled.py | 34 - pygments/lexers/configs.py | 934 ---- pygments/lexers/console.py | 114 - pygments/lexers/crystal.py | 393 -- pygments/lexers/csound.py | 460 -- pygments/lexers/css.py | 692 --- pygments/lexers/d.py | 251 - pygments/lexers/dalvik.py | 125 - pygments/lexers/data.py | 561 --- pygments/lexers/diff.py | 165 - pygments/lexers/dotnet.py | 688 --- pygments/lexers/dsls.py | 878 ---- pygments/lexers/dylan.py | 289 -- pygments/lexers/ecl.py | 125 - pygments/lexers/eiffel.py | 65 - pygments/lexers/elm.py | 121 - pygments/lexers/erlang.py | 533 --- pygments/lexers/esoteric.py | 277 -- pygments/lexers/ezhil.py | 69 - pygments/lexers/factor.py | 344 -- pygments/lexers/fantom.py | 250 - pygments/lexers/felix.py | 273 -- pygments/lexers/floscript.py | 83 - pygments/lexers/forth.py | 179 - pygments/lexers/fortran.py | 206 - pygments/lexers/foxpro.py | 428 -- pygments/lexers/freefem.py | 898 ---- pygments/lexers/functional.py | 21 - pygments/lexers/go.py | 101 - pygments/lexers/grammar_notation.py | 213 - pygments/lexers/graph.py | 85 - pygments/lexers/graphics.py | 781 ---- pygments/lexers/haskell.py | 869 ---- pygments/lexers/haxe.py | 936 ---- pygments/lexers/hdl.py | 382 -- pygments/lexers/hexdump.py | 103 - pygments/lexers/html.py | 602 --- pygments/lexers/idl.py | 270 -- pygments/lexers/igor.py | 425 -- pygments/lexers/inferno.py | 96 - pygments/lexers/installers.py | 322 -- pygments/lexers/int_fiction.py | 1343 ------ pygments/lexers/iolang.py | 63 - pygments/lexers/j.py | 146 - pygments/lexers/javascript.py | 1525 ------- pygments/lexers/julia.py | 335 -- pygments/lexers/jvm.py | 1654 ------- pygments/lexers/lisp.py | 2693 ----------- pygments/lexers/make.py | 202 - pygments/lexers/markup.py | 599 --- pygments/lexers/math.py | 21 - pygments/lexers/matlab.py | 663 --- pygments/lexers/ml.py | 769 ---- pygments/lexers/modeling.py | 366 -- pygments/lexers/modula2.py | 1561 ------- pygments/lexers/monte.py | 204 - pygments/lexers/ncl.py | 894 ---- pygments/lexers/nimrod.py | 159 - pygments/lexers/nit.py | 64 - pygments/lexers/nix.py | 136 - pygments/lexers/oberon.py | 105 - pygments/lexers/objective.py | 504 -- pygments/lexers/ooc.py | 85 - pygments/lexers/other.py | 41 - pygments/lexers/parasail.py | 79 - pygments/lexers/parsers.py | 835 ---- pygments/lexers/pascal.py | 644 --- pygments/lexers/pawn.py | 199 - pygments/lexers/perl.py | 620 --- pygments/lexers/php.py | 270 -- pygments/lexers/pony.py | 94 - pygments/lexers/praat.py | 294 -- pygments/lexers/prolog.py | 305 -- pygments/lexers/python.py | 941 ---- pygments/lexers/qvt.py | 152 - pygments/lexers/r.py | 193 - pygments/lexers/rdf.py | 423 -- pygments/lexers/rebol.py | 431 -- pygments/lexers/resource.py | 85 - pygments/lexers/rnc.py | 67 - pygments/lexers/roboconf.py | 82 - pygments/lexers/robotframework.py 
| 560 --- pygments/lexers/ruby.py | 519 --- pygments/lexers/rust.py | 220 - pygments/lexers/sas.py | 228 - pygments/lexers/scripting.py | 1222 ----- pygments/lexers/sgf.py | 61 - pygments/lexers/shell.py | 833 ---- pygments/lexers/slash.py | 185 - pygments/lexers/smalltalk.py | 195 - pygments/lexers/smv.py | 79 - pygments/lexers/snobol.py | 83 - pygments/lexers/special.py | 103 - pygments/lexers/sql.py | 729 --- pygments/lexers/stata.py | 171 - pygments/lexers/supercollider.py | 90 - pygments/lexers/tcl.py | 145 - pygments/lexers/templates.py | 2282 --------- pygments/lexers/teraterm.py | 158 - pygments/lexers/testing.py | 207 - pygments/lexers/text.py | 26 - pygments/lexers/textedit.py | 169 - pygments/lexers/textfmts.py | 297 -- pygments/lexers/theorem.py | 456 -- pygments/lexers/trafficscript.py | 54 - pygments/lexers/typoscript.py | 219 - pygments/lexers/unicon.py | 390 -- pygments/lexers/urbi.py | 133 - pygments/lexers/varnish.py | 190 - pygments/lexers/verification.py | 111 - pygments/lexers/web.py | 24 - pygments/lexers/webmisc.py | 989 ---- pygments/lexers/whiley.py | 116 - pygments/lexers/x10.py | 69 - pygments/lexers/xorg.py | 37 - pygments/lexers/zig.py | 126 - pygments/modeline.py | 44 - pygments/plugin.py | 70 - pygments/regexopt.py | 92 - pygments/scanner.py | 105 - pygments/sphinxext.py | 158 - pygments/style.py | 182 - pygments/styles/__init__.py | 86 - pygments/styles/abap.py | 29 - pygments/styles/algol.py | 63 - pygments/styles/algol_nu.py | 63 - pygments/styles/arduino.py | 98 - pygments/styles/autumn.py | 65 - pygments/styles/borland.py | 51 - pygments/styles/bw.py | 49 - pygments/styles/colorful.py | 81 - pygments/styles/default.py | 73 - pygments/styles/emacs.py | 72 - pygments/styles/friendly.py | 72 - pygments/styles/fruity.py | 42 - pygments/styles/igor.py | 29 - pygments/styles/lovelace.py | 97 - pygments/styles/manni.py | 75 - pygments/styles/monokai.py | 106 - pygments/styles/murphy.py | 80 - pygments/styles/native.py | 65 - pygments/styles/paraiso_dark.py | 125 - pygments/styles/paraiso_light.py | 125 - pygments/styles/pastie.py | 75 - pygments/styles/perldoc.py | 69 - pygments/styles/rainbow_dash.py | 89 - pygments/styles/rrt.py | 33 - pygments/styles/sas.py | 44 - pygments/styles/solarized.py | 130 - pygments/styles/stata_dark.py | 41 - pygments/styles/stata_light.py | 39 - pygments/styles/tango.py | 141 - pygments/styles/trac.py | 63 - pygments/styles/vim.py | 63 - pygments/styles/vs.py | 38 - pygments/styles/xcode.py | 51 - pygments/token.py | 213 - pygments/unistring.py | 221 - pygments/util.py | 388 -- requirements.txt | 5 - scripts/check_sources.py | 212 - scripts/debug_lexer.py | 246 - scripts/detect_missing_analyse_text.py | 33 - scripts/epydoc.css | 280 -- scripts/find_error.py | 1 - scripts/get_vimkw.py | 74 - scripts/pylintrc | 301 -- scripts/release-checklist | 24 - scripts/vim2pygments.py | 935 ---- setup.cfg | 10 - setup.py | 83 - tests/dtds/HTML4-f.dtd | 37 - tests/dtds/HTML4-s.dtd | 869 ---- tests/dtds/HTML4.dcl | 88 - tests/dtds/HTML4.dtd | 1092 ----- tests/dtds/HTML4.soc | 9 - tests/dtds/HTMLlat1.ent | 195 - tests/dtds/HTMLspec.ent | 77 - tests/dtds/HTMLsym.ent | 241 - tests/examplefiles/99_bottles_of_beer.chpl | 206 - tests/examplefiles/AcidStateAdvanced.hs | 209 - tests/examplefiles/AlternatingGroup.mu | 102 - tests/examplefiles/BOM.js | 1 - tests/examplefiles/Blink.ino | 24 - tests/examplefiles/CPDictionary.j | 611 --- tests/examplefiles/Charmci.ci | 20 - tests/examplefiles/Config.in.cache | 1973 -------- tests/examplefiles/Constants.mo | 
158 - tests/examplefiles/DancingSudoku.lhs | 411 -- tests/examplefiles/Deflate.fs | 578 --- tests/examplefiles/Error.pmod | 38 - tests/examplefiles/Errors.scala | 23 - tests/examplefiles/FakeFile.pike | 360 -- tests/examplefiles/Get-CommandDefinitionHtml.ps1 | 66 - tests/examplefiles/IPDispatchC.nc | 104 - tests/examplefiles/IPDispatchP.nc | 671 --- tests/examplefiles/Intro.java | 1660 ------- tests/examplefiles/Makefile | 1131 ----- tests/examplefiles/Object.st | 4394 ------------------ tests/examplefiles/OrderedMap.hx | 584 --- tests/examplefiles/RoleQ.pm6 | 23 - tests/examplefiles/SmallCheck.hs | 378 -- tests/examplefiles/Sorting.mod | 470 -- tests/examplefiles/StdGeneric.icl | 134 - tests/examplefiles/Sudoku.lhs | 382 -- tests/examplefiles/abnf_example1.abnf | 22 - tests/examplefiles/abnf_example2.abnf | 9 - tests/examplefiles/addressbook.proto | 30 - tests/examplefiles/ahcon.f | 340 -- tests/examplefiles/all.nit | 1986 -------- tests/examplefiles/antlr_ANTLRv3.g | 608 --- tests/examplefiles/antlr_throws | 1 - tests/examplefiles/apache2.conf | 398 -- tests/examplefiles/as3_test.as | 143 - tests/examplefiles/as3_test2.as | 46 - tests/examplefiles/as3_test3.as | 3 - tests/examplefiles/aspx-cs_example | 27 - tests/examplefiles/autoit_submit.au3 | 27 - tests/examplefiles/automake.mk | 7 - tests/examplefiles/badcase.java | 2 - tests/examplefiles/bigtest.nsi | 308 -- tests/examplefiles/bnf_example1.bnf | 15 - tests/examplefiles/boot-9.scm | 1557 ------- tests/examplefiles/ca65_example | 284 -- tests/examplefiles/capdl_example.cdl | 64 - tests/examplefiles/cbmbas_example | 9 - tests/examplefiles/cells.ps | 515 --- tests/examplefiles/ceval.c | 2604 ----------- tests/examplefiles/char.scala | 4 - tests/examplefiles/cheetah_example.html | 13 - tests/examplefiles/classes.dylan | 125 - tests/examplefiles/clojure-weird-keywords.clj | 5 - tests/examplefiles/condensed_ruby.rb | 10 - tests/examplefiles/coq_RelationClasses | 447 -- tests/examplefiles/core.cljs | 52 - tests/examplefiles/database.pytb | 20 - tests/examplefiles/de.MoinMoin.po | 2461 ---------- tests/examplefiles/demo.ahk | 181 - tests/examplefiles/demo.cfm | 50 - tests/examplefiles/demo.css.in | 6 - tests/examplefiles/demo.frt | 3 - tests/examplefiles/demo.hbs | 34 - tests/examplefiles/demo.js.in | 6 - tests/examplefiles/demo.thrift | 14 - tests/examplefiles/demo.xul.in | 7 - tests/examplefiles/django_sample.html+django | 68 - tests/examplefiles/docker.docker | 34 - tests/examplefiles/durexmania.aheui | 4 - tests/examplefiles/dwarf.cw | 17 - tests/examplefiles/eg_example1.eg | 155 - tests/examplefiles/ember.handlebars | 33 - tests/examplefiles/erl_session | 10 - tests/examplefiles/es6.js | 46 - tests/examplefiles/escape_semicolon.clj | 1 - tests/examplefiles/eval.rs | 606 --- tests/examplefiles/evil_regex.js | 48 - tests/examplefiles/example.Rd | 78 - tests/examplefiles/example.als | 217 - tests/examplefiles/example.bat | 211 - tests/examplefiles/example.bbc | 156 - tests/examplefiles/example.bc | 53 - tests/examplefiles/example.boa | 18 - tests/examplefiles/example.bug | 54 - tests/examplefiles/example.c | 2080 --------- tests/examplefiles/example.ceylon | 52 - tests/examplefiles/example.chai | 6 - tests/examplefiles/example.clay | 33 - tests/examplefiles/example.cls | 15 - tests/examplefiles/example.cob | 2620 ----------- tests/examplefiles/example.coffee | 27 - tests/examplefiles/example.cpp | 2363 ---------- tests/examplefiles/example.e | 124 - tests/examplefiles/example.elm | 58 - tests/examplefiles/example.ezt | 32 - 
tests/examplefiles/example.f90 | 8 - tests/examplefiles/example.feature | 16 - tests/examplefiles/example.fish | 580 --- tests/examplefiles/example.flo | 40 - tests/examplefiles/example.gd | 23 - tests/examplefiles/example.gi | 64 - tests/examplefiles/example.golo | 113 - tests/examplefiles/example.groovy | 2 - tests/examplefiles/example.gs | 106 - tests/examplefiles/example.gst | 7 - tests/examplefiles/example.hlsl | 168 - tests/examplefiles/example.hs | 41 - tests/examplefiles/example.hx | 192 - tests/examplefiles/example.i6t | 32 - tests/examplefiles/example.i7x | 45 - tests/examplefiles/example.icn | 283 -- tests/examplefiles/example.icon | 381 -- tests/examplefiles/example.j | 564 --- tests/examplefiles/example.jag | 48 - tests/examplefiles/example.java | 16 - tests/examplefiles/example.jcl | 31 - tests/examplefiles/example.jsgf | 28 - tests/examplefiles/example.jsonld | 27 - tests/examplefiles/example.juttle | 110 - tests/examplefiles/example.kal | 75 - tests/examplefiles/example.kt | 47 - tests/examplefiles/example.lagda | 19 - tests/examplefiles/example.liquid | 42 - tests/examplefiles/example.lua | 274 -- tests/examplefiles/example.ma | 8 - tests/examplefiles/example.mac | 6 - tests/examplefiles/example.md | 64 - tests/examplefiles/example.monkey | 152 - tests/examplefiles/example.moo | 26 - tests/examplefiles/example.moon | 629 --- tests/examplefiles/example.mq4 | 187 - tests/examplefiles/example.mqh | 123 - tests/examplefiles/example.msc | 43 - tests/examplefiles/example.ng2 | 11 - tests/examplefiles/example.ni | 57 - tests/examplefiles/example.nim | 1010 ---- tests/examplefiles/example.nix | 80 - tests/examplefiles/example.ns2 | 69 - tests/examplefiles/example.pas | 2708 ----------- tests/examplefiles/example.pcmk | 115 - tests/examplefiles/example.pony | 18 - tests/examplefiles/example.pp | 8 - tests/examplefiles/example.praat | 280 -- tests/examplefiles/example.prg | 161 - tests/examplefiles/example.rb | 1852 -------- tests/examplefiles/example.red | 257 -- tests/examplefiles/example.reds | 150 - tests/examplefiles/example.reg | 19 - tests/examplefiles/example.rexx | 50 - tests/examplefiles/example.rhtml | 561 --- tests/examplefiles/example.rkt | 743 --- tests/examplefiles/example.rpf | 4 - tests/examplefiles/example.rts | 118 - tests/examplefiles/example.sbl | 109 - tests/examplefiles/example.scd | 76 - tests/examplefiles/example.sgf | 35 - tests/examplefiles/example.sh | 22 - tests/examplefiles/example.sh-session | 19 - tests/examplefiles/example.shell-session | 45 - tests/examplefiles/example.shex | 20 - tests/examplefiles/example.sl | 6 - tests/examplefiles/example.slim | 31 - tests/examplefiles/example.sls | 51 - tests/examplefiles/example.sml | 156 - tests/examplefiles/example.snobol | 15 - tests/examplefiles/example.stan | 122 - tests/examplefiles/example.tap | 37 - tests/examplefiles/example.tasm | 527 --- tests/examplefiles/example.tea | 34 - tests/examplefiles/example.tf | 208 - tests/examplefiles/example.thy | 751 --- tests/examplefiles/example.todotxt | 9 - tests/examplefiles/example.toml | 181 - tests/examplefiles/example.ttl | 43 - tests/examplefiles/example.u | 547 --- tests/examplefiles/example.u1 | 111 - tests/examplefiles/example.vbs | 55 - tests/examplefiles/example.weechatlog | 11 - tests/examplefiles/example.whiley | 296 -- tests/examplefiles/example.x10 | 9 - tests/examplefiles/example.xhtml | 376 -- tests/examplefiles/example.xtend | 34 - tests/examplefiles/example.xtm | 1101 ----- tests/examplefiles/example.yaml | 311 -- tests/examplefiles/example.zig 
| 263 -- tests/examplefiles/example1.cadl | 149 - tests/examplefiles/example2.aspx | 29 - tests/examplefiles/example2.cpp | 20 - tests/examplefiles/example2.msc | 79 - tests/examplefiles/exampleScript.cfc | 241 - tests/examplefiles/exampleTag.cfc | 18 - tests/examplefiles/example_coq.v | 4 - tests/examplefiles/example_elixir.ex | 233 - tests/examplefiles/example_file.fy | 128 - tests/examplefiles/ezhil_primefactors.n | 152 - tests/examplefiles/fennelview.fnl | 156 - tests/examplefiles/fibonacci.tokigun.aheui | 4 - tests/examplefiles/firefox.mak | 586 --- tests/examplefiles/flatline_example | 186 - tests/examplefiles/flipflop.sv | 19 - tests/examplefiles/foo.sce | 6 - tests/examplefiles/format.ml | 1213 ----- tests/examplefiles/freefem.edp | 94 - tests/examplefiles/fucked_up.rb | 77 - tests/examplefiles/function.mu | 1 - tests/examplefiles/functional.rst | 1472 ------ tests/examplefiles/garcia-wachs.kk | 133 - tests/examplefiles/genclass.clj | 510 --- tests/examplefiles/genshi_example.xml+genshi | 193 - tests/examplefiles/genshitext_example.genshitext | 33 - tests/examplefiles/glsl.frag | 7 - tests/examplefiles/glsl.vert | 13 - tests/examplefiles/grammar-test.p6 | 22 - tests/examplefiles/guidance.smv | 1124 ----- tests/examplefiles/hash_syntax.rb | 5 - tests/examplefiles/hello-world.puzzlet.aheui | 10 - tests/examplefiles/hello.at | 6 - tests/examplefiles/hello.golo | 5 - tests/examplefiles/hello.lsl | 12 - tests/examplefiles/hello.smali | 40 - tests/examplefiles/hello.sp | 9 - tests/examplefiles/hexdump_debugexe | 309 -- tests/examplefiles/hexdump_hd | 310 -- tests/examplefiles/hexdump_hexcat | 247 - tests/examplefiles/hexdump_hexdump | 310 -- tests/examplefiles/hexdump_od | 310 -- tests/examplefiles/hexdump_xxd | 309 -- tests/examplefiles/html+php_faulty.php | 1 - tests/examplefiles/http_request_example | 15 - tests/examplefiles/http_response_example | 29 - tests/examplefiles/hybris_File.hy | 174 - tests/examplefiles/idl_sample.pro | 73 - tests/examplefiles/iex_example | 23 - tests/examplefiles/inet_pton6.dg | 71 - tests/examplefiles/inform6_example | 375 -- tests/examplefiles/interp.scala | 10 - tests/examplefiles/intro.ik | 24 - tests/examplefiles/ints.php | 10 - tests/examplefiles/intsyn.fun | 675 --- tests/examplefiles/intsyn.sig | 286 -- tests/examplefiles/irb_heredoc | 8 - tests/examplefiles/irc.lsp | 214 - tests/examplefiles/java.properties | 16 - tests/examplefiles/jbst_example1.jbst | 28 - tests/examplefiles/jbst_example2.jbst | 45 - tests/examplefiles/jinjadesignerdoc.rst | 713 --- tests/examplefiles/json.lasso | 301 -- tests/examplefiles/json.lasso9 | 213 - tests/examplefiles/language.hy | 165 - tests/examplefiles/lighttpd_config.conf | 13 - tests/examplefiles/limbo.b | 456 -- tests/examplefiles/linecontinuation.py | 47 - tests/examplefiles/livescript-demo.ls | 43 - tests/examplefiles/logos_example.xm | 28 - tests/examplefiles/ltmain.sh | 2849 ------------ tests/examplefiles/main.cmake | 45 - tests/examplefiles/markdown.lsp | 679 --- tests/examplefiles/matlab_noreturn | 3 - tests/examplefiles/matlab_sample | 34 - tests/examplefiles/matlabsession_sample.txt | 37 - tests/examplefiles/metagrammar.treetop | 455 -- tests/examplefiles/minehunt.qml | 112 - tests/examplefiles/minimal.ns2 | 4 - tests/examplefiles/modula2_test_cases.def | 354 -- tests/examplefiles/moin_SyntaxReference.txt | 340 -- tests/examplefiles/multiline_regexes.rb | 38 - tests/examplefiles/nanomsg.intr | 95 - tests/examplefiles/nasm_aoutso.asm | 96 - tests/examplefiles/nasm_objexe.asm | 30 - 
tests/examplefiles/nemerle_sample.n | 87 - tests/examplefiles/nginx_nginx.conf | 118 - tests/examplefiles/noexcept.cpp | 8 - tests/examplefiles/numbers.c | 12 - tests/examplefiles/objc_example.m | 179 - tests/examplefiles/openedge_example | 34 - tests/examplefiles/pacman.conf | 49 - tests/examplefiles/pacman.ijs | 1107 ----- tests/examplefiles/pawn_example | 25 - tests/examplefiles/perl_misc | 62 - tests/examplefiles/perl_perl5db | 998 ---- tests/examplefiles/perl_regex-delims | 120 - tests/examplefiles/perlfunc.1 | 856 ---- tests/examplefiles/phpMyAdmin.spec | 163 - tests/examplefiles/phpcomplete.vim | 567 --- tests/examplefiles/pkgconfig_example.pc | 18 - tests/examplefiles/plain.bst | 1097 ----- tests/examplefiles/pleac.in.rb | 1223 ----- tests/examplefiles/postgresql_test.txt | 81 - tests/examplefiles/pppoe.applescript | 10 - tests/examplefiles/psql_session.txt | 122 - tests/examplefiles/py3_test.txt | 2 - tests/examplefiles/py3tb_test.py3tb | 4 - tests/examplefiles/pycon_ctrlc_traceback | 118 - tests/examplefiles/pycon_test.pycon | 17 - tests/examplefiles/pytb_test2.pytb | 2 - tests/examplefiles/pytb_test3.pytb | 4 - tests/examplefiles/python25-bsd.mak | 234 - tests/examplefiles/qbasic_example | 2 - tests/examplefiles/qsort.prolog | 13 - tests/examplefiles/r-console-transcript.Rout | 38 - tests/examplefiles/r6rs-comments.scm | 23 - tests/examplefiles/ragel-cpp_rlscan | 280 -- tests/examplefiles/ragel-cpp_snippet | 2 - tests/examplefiles/regex.js | 22 - tests/examplefiles/resourcebundle_demo | 9 - tests/examplefiles/reversi.lsp | 427 -- tests/examplefiles/rnc_example.rnc | 33 - tests/examplefiles/roboconf.graph | 40 - tests/examplefiles/roboconf.instances | 24 - tests/examplefiles/robotframework_test.txt | 40 - tests/examplefiles/rql-queries.rql | 34 - tests/examplefiles/ruby_func_def.rb | 11 - tests/examplefiles/sample.qvto | 4 - tests/examplefiles/scilab.sci | 30 - tests/examplefiles/scope.cirru | 237 - tests/examplefiles/session.dylan-console | 9 - tests/examplefiles/sibling.prolog | 19 - tests/examplefiles/simple.camkes | 38 - tests/examplefiles/simple.croc | 747 --- tests/examplefiles/smarty_example.html | 209 - tests/examplefiles/source.lgt | 343 -- tests/examplefiles/sources.list | 62 - tests/examplefiles/sparql.rq | 48 - tests/examplefiles/sphere.pov | 18 - tests/examplefiles/sqlite3.sqlite3-console | 27 - tests/examplefiles/squid.conf | 30 - tests/examplefiles/string.jl | 1031 ----- tests/examplefiles/string_delimiters.d | 21 - tests/examplefiles/stripheredoc.sh | 3 - tests/examplefiles/subr.el | 4868 -------------------- tests/examplefiles/swig_java.swg | 1329 ------ tests/examplefiles/swig_std_vector.i | 225 - tests/examplefiles/tads3_example.t | 1248 ----- tests/examplefiles/teraterm.ttl | 34 - tests/examplefiles/termcap | 1340 ------ tests/examplefiles/terminfo | 1445 ------ tests/examplefiles/test-3.0.xq | 185 - tests/examplefiles/test-exist-update.xq | 75 - tests/examplefiles/test.R | 185 - tests/examplefiles/test.adb | 211 - tests/examplefiles/test.adls | 313 -- tests/examplefiles/test.agda | 109 - tests/examplefiles/test.apl | 26 - tests/examplefiles/test.asy | 131 - tests/examplefiles/test.awk | 121 - tests/examplefiles/test.bb | 95 - tests/examplefiles/test.bib | 77 - tests/examplefiles/test.bmx | 145 - tests/examplefiles/test.boo | 39 - tests/examplefiles/test.bpl | 140 - tests/examplefiles/test.bro | 250 - tests/examplefiles/test.cadl | 32 - tests/examplefiles/test.cr | 2871 ------------ tests/examplefiles/test.cs | 374 -- tests/examplefiles/test.csd | 18 - 
tests/examplefiles/test.css | 54 - tests/examplefiles/test.cu | 36 - tests/examplefiles/test.cyp | 123 - tests/examplefiles/test.d | 135 - tests/examplefiles/test.dart | 23 - tests/examplefiles/test.dtd | 89 - tests/examplefiles/test.ebnf | 31 - tests/examplefiles/test.ec | 605 --- tests/examplefiles/test.eh | 315 -- tests/examplefiles/test.erl | 181 - tests/examplefiles/test.escript | 4 - tests/examplefiles/test.evoque | 33 - tests/examplefiles/test.fan | 818 ---- tests/examplefiles/test.flx | 57 - tests/examplefiles/test.gdc | 13 - tests/examplefiles/test.gradle | 20 - tests/examplefiles/test.groovy | 97 - tests/examplefiles/test.hsail | 62 - tests/examplefiles/test.html | 339 -- tests/examplefiles/test.idr | 101 - tests/examplefiles/test.ini | 10 - tests/examplefiles/test.java | 653 --- tests/examplefiles/test.jsp | 24 - tests/examplefiles/test.lean | 217 - tests/examplefiles/test.maql | 45 - tests/examplefiles/test.mask | 41 - tests/examplefiles/test.mod | 374 -- tests/examplefiles/test.moo | 51 - tests/examplefiles/test.mt | 7 - tests/examplefiles/test.myt | 166 - tests/examplefiles/test.ncl | 20 - tests/examplefiles/test.nim | 93 - tests/examplefiles/test.odin | 43 - tests/examplefiles/test.opa | 10 - tests/examplefiles/test.orc | 81 - tests/examplefiles/test.p6 | 252 - tests/examplefiles/test.pan | 54 - tests/examplefiles/test.pas | 743 --- tests/examplefiles/test.php | 544 --- tests/examplefiles/test.pig | 148 - tests/examplefiles/test.plot | 333 -- tests/examplefiles/test.ps1 | 108 - tests/examplefiles/test.psl | 182 - tests/examplefiles/test.pwn | 253 - tests/examplefiles/test.pypylog | 1000 ---- tests/examplefiles/test.r3 | 114 - tests/examplefiles/test.rb | 177 - tests/examplefiles/test.rhtml | 43 - tests/examplefiles/test.rsl | 111 - tests/examplefiles/test.scaml | 8 - tests/examplefiles/test.sco | 22 - tests/examplefiles/test.shen | 137 - tests/examplefiles/test.sil | 206 - tests/examplefiles/test.ssp | 12 - tests/examplefiles/test.swift | 65 - tests/examplefiles/test.tcsh | 830 ---- tests/examplefiles/test.vb | 407 -- tests/examplefiles/test.vhdl | 161 - tests/examplefiles/test.xqy | 138 - tests/examplefiles/test.xsl | 23 - tests/examplefiles/test.zep | 33 - tests/examplefiles/test2.odin | 30 - tests/examplefiles/test2.pypylog | 120 - tests/examplefiles/test_basic.adls | 28 - tests/examplefiles/truncated.pytb | 15 - tests/examplefiles/tsql_example.sql | 72 - tests/examplefiles/twig_test | 4612 ------------------- tests/examplefiles/type.lisp | 1218 ----- tests/examplefiles/typescript_example | 39 - tests/examplefiles/typoscript_example | 1930 -------- tests/examplefiles/underscore.coffee | 603 --- tests/examplefiles/unicode.applescript | 5 - tests/examplefiles/unicode.go | 10 - tests/examplefiles/unicode.js | 6 - tests/examplefiles/unicodedoc.py | 11 - tests/examplefiles/unix-io.lid | 37 - tests/examplefiles/varnish.vcl | 187 - tests/examplefiles/vbnet_test.bas | 29 - tests/examplefiles/vctreestatus_hg | 4 - tests/examplefiles/vimrc | 21 - tests/examplefiles/vpath.mk | 16 - tests/examplefiles/wdiff_example1.wdiff | 731 --- tests/examplefiles/wdiff_example3.wdiff | 10 - tests/examplefiles/webkit-transition.css | 3 - tests/examplefiles/while.pov | 13 - tests/examplefiles/wiki.factor | 384 -- tests/examplefiles/xml_example | 1897 -------- tests/examplefiles/xorg.conf | 48 - tests/examplefiles/yahalom.cpsa | 34 - tests/examplefiles/zmlrpc.f90 | 798 ---- tests/run.py | 58 - tests/string_asserts.py | 22 - tests/support.py | 17 - tests/support/empty.py | 1 - 
tests/support/html_formatter.py | 6 - tests/support/python_lexer.py | 12 - tests/support/tags | 36 - tests/test_asm.py | 30 - tests/test_basic.py | 74 - tests/test_basic_api.py | 334 -- tests/test_bibtex.py | 236 - tests/test_cfm.py | 46 - tests/test_clexer.py | 259 -- tests/test_cmdline.py | 313 -- tests/test_cpp.py | 33 - tests/test_crystal.py | 308 -- tests/test_csound.py | 491 -- tests/test_data.py | 117 - tests/test_examplefiles.py | 138 - tests/test_ezhil.py | 183 - tests/test_html_formatter.py | 200 - tests/test_inherit.py | 94 - tests/test_irc_formatter.py | 30 - tests/test_java.py | 78 - tests/test_javascript.py | 84 - tests/test_julia.py | 58 - tests/test_kotlin.py | 131 - tests/test_latex_formatter.py | 54 - tests/test_lexers_other.py | 80 - tests/test_markdown_lexer.py | 31 - tests/test_modeline.py | 26 - tests/test_objectiveclexer.py | 92 - tests/test_perllexer.py | 157 - tests/test_php.py | 36 - tests/test_praat.py | 130 - tests/test_properties.py | 89 - tests/test_python.py | 133 - tests/test_qbasiclexer.py | 43 - tests/test_r.py | 70 - tests/test_regexlexer.py | 66 - tests/test_regexopt.py | 110 - tests/test_rtf_formatter.py | 109 - tests/test_ruby.py | 145 - tests/test_shell.py | 159 - tests/test_smarty.py | 40 - tests/test_sql.py | 118 - tests/test_string_asserts.py | 35 - tests/test_terminal_formatter.py | 102 - tests/test_textfmts.py | 41 - tests/test_token.py | 54 - tests/test_unistring.py | 48 - tests/test_using_api.py | 40 - tests/test_util.py | 213 - tests/test_whiley.py | 30 - tox.ini | 7 - 792 files changed, 3 insertions(+), 229172 deletions(-) delete mode 100644 AUTHORS delete mode 100644 CHANGES delete mode 100644 LICENSE delete mode 100644 MANIFEST.in delete mode 100644 Makefile create mode 100644 README delete mode 100644 README.rst delete mode 100644 TODO delete mode 100644 bitbucket-pipelines.yml delete mode 100644 doc/Makefile delete mode 100644 doc/_static/favicon.ico delete mode 100644 doc/_static/logo_new.png delete mode 100644 doc/_static/logo_only.png delete mode 100644 doc/_templates/docssidebar.html delete mode 100644 doc/_templates/indexsidebar.html delete mode 100644 doc/_themes/pygments14/layout.html delete mode 100644 doc/_themes/pygments14/static/bodybg.png delete mode 100644 doc/_themes/pygments14/static/docbg.png delete mode 100644 doc/_themes/pygments14/static/listitem.png delete mode 100644 doc/_themes/pygments14/static/logo.png delete mode 100644 doc/_themes/pygments14/static/pocoo.png delete mode 100644 doc/_themes/pygments14/static/pygments14.css_t delete mode 100644 doc/_themes/pygments14/theme.conf delete mode 100644 doc/conf.py delete mode 100644 doc/docs/api.rst delete mode 100644 doc/docs/authors.rst delete mode 100644 doc/docs/changelog.rst delete mode 100644 doc/docs/cmdline.rst delete mode 100644 doc/docs/filterdevelopment.rst delete mode 100644 doc/docs/filters.rst delete mode 100644 doc/docs/formatterdevelopment.rst delete mode 100644 doc/docs/formatters.rst delete mode 100644 doc/docs/index.rst delete mode 100644 doc/docs/integrate.rst delete mode 100644 doc/docs/java.rst delete mode 100644 doc/docs/lexerdevelopment.rst delete mode 100644 doc/docs/lexers.rst delete mode 100644 doc/docs/moinmoin.rst delete mode 100644 doc/docs/plugins.rst delete mode 100644 doc/docs/quickstart.rst delete mode 100644 doc/docs/rstdirective.rst delete mode 100644 doc/docs/styles.rst delete mode 100644 doc/docs/tokens.rst delete mode 100644 doc/docs/unicode.rst delete mode 100644 doc/download.rst delete mode 100644 doc/faq.rst delete mode 100644 
doc/index.rst delete mode 100644 doc/languages.rst delete mode 100644 doc/make.bat delete mode 100644 doc/pygmentize.1 delete mode 100755 external/autopygmentize delete mode 100755 external/lasso-builtins-generator-9.lasso delete mode 100644 external/markdown-processor.py delete mode 100644 external/moin-parser.py delete mode 100644 external/pygments.bashcomp delete mode 100644 external/rst-directive.py delete mode 100755 pygmentize delete mode 100644 pygments/__init__.py delete mode 100644 pygments/cmdline.py delete mode 100644 pygments/console.py delete mode 100644 pygments/filter.py delete mode 100644 pygments/filters/__init__.py delete mode 100644 pygments/formatter.py delete mode 100644 pygments/formatters/__init__.py delete mode 100755 pygments/formatters/_mapping.py delete mode 100644 pygments/formatters/bbcode.py delete mode 100644 pygments/formatters/html.py delete mode 100644 pygments/formatters/img.py delete mode 100644 pygments/formatters/irc.py delete mode 100644 pygments/formatters/latex.py delete mode 100644 pygments/formatters/other.py delete mode 100644 pygments/formatters/rtf.py delete mode 100644 pygments/formatters/svg.py delete mode 100644 pygments/formatters/terminal.py delete mode 100644 pygments/formatters/terminal256.py delete mode 100644 pygments/lexer.py delete mode 100644 pygments/lexers/__init__.py delete mode 100644 pygments/lexers/_asy_builtins.py delete mode 100644 pygments/lexers/_cl_builtins.py delete mode 100644 pygments/lexers/_cocoa_builtins.py delete mode 100644 pygments/lexers/_csound_builtins.py delete mode 100644 pygments/lexers/_lasso_builtins.py delete mode 100644 pygments/lexers/_lua_builtins.py delete mode 100644 pygments/lexers/_mapping.py delete mode 100644 pygments/lexers/_mql_builtins.py delete mode 100644 pygments/lexers/_openedge_builtins.py delete mode 100644 pygments/lexers/_php_builtins.py delete mode 100644 pygments/lexers/_postgres_builtins.py delete mode 100644 pygments/lexers/_scilab_builtins.py delete mode 100644 pygments/lexers/_sourcemod_builtins.py delete mode 100644 pygments/lexers/_stan_builtins.py delete mode 100644 pygments/lexers/_stata_builtins.py delete mode 100644 pygments/lexers/_tsql_builtins.py delete mode 100644 pygments/lexers/_vbscript_builtins.py delete mode 100644 pygments/lexers/_vim_builtins.py delete mode 100644 pygments/lexers/actionscript.py delete mode 100644 pygments/lexers/agile.py delete mode 100644 pygments/lexers/algebra.py delete mode 100644 pygments/lexers/ambient.py delete mode 100644 pygments/lexers/ampl.py delete mode 100644 pygments/lexers/apl.py delete mode 100644 pygments/lexers/archetype.py delete mode 100644 pygments/lexers/asm.py delete mode 100644 pygments/lexers/automation.py delete mode 100644 pygments/lexers/basic.py delete mode 100644 pygments/lexers/bibtex.py delete mode 100644 pygments/lexers/boa.py delete mode 100644 pygments/lexers/business.py delete mode 100644 pygments/lexers/c_cpp.py delete mode 100644 pygments/lexers/c_like.py delete mode 100644 pygments/lexers/capnproto.py delete mode 100644 pygments/lexers/chapel.py delete mode 100644 pygments/lexers/clean.py delete mode 100644 pygments/lexers/compiled.py delete mode 100644 pygments/lexers/configs.py delete mode 100644 pygments/lexers/console.py delete mode 100644 pygments/lexers/crystal.py delete mode 100644 pygments/lexers/csound.py delete mode 100644 pygments/lexers/css.py delete mode 100644 pygments/lexers/d.py delete mode 100644 pygments/lexers/dalvik.py delete mode 100644 pygments/lexers/data.py delete mode 100644 
pygments/lexers/diff.py delete mode 100644 pygments/lexers/dotnet.py delete mode 100644 pygments/lexers/dsls.py delete mode 100644 pygments/lexers/dylan.py delete mode 100644 pygments/lexers/ecl.py delete mode 100644 pygments/lexers/eiffel.py delete mode 100644 pygments/lexers/elm.py delete mode 100644 pygments/lexers/erlang.py delete mode 100644 pygments/lexers/esoteric.py delete mode 100644 pygments/lexers/ezhil.py delete mode 100644 pygments/lexers/factor.py delete mode 100644 pygments/lexers/fantom.py delete mode 100644 pygments/lexers/felix.py delete mode 100644 pygments/lexers/floscript.py delete mode 100644 pygments/lexers/forth.py delete mode 100644 pygments/lexers/fortran.py delete mode 100644 pygments/lexers/foxpro.py delete mode 100644 pygments/lexers/freefem.py delete mode 100644 pygments/lexers/functional.py delete mode 100644 pygments/lexers/go.py delete mode 100644 pygments/lexers/grammar_notation.py delete mode 100644 pygments/lexers/graph.py delete mode 100644 pygments/lexers/graphics.py delete mode 100644 pygments/lexers/haskell.py delete mode 100644 pygments/lexers/haxe.py delete mode 100644 pygments/lexers/hdl.py delete mode 100644 pygments/lexers/hexdump.py delete mode 100644 pygments/lexers/html.py delete mode 100644 pygments/lexers/idl.py delete mode 100644 pygments/lexers/igor.py delete mode 100644 pygments/lexers/inferno.py delete mode 100644 pygments/lexers/installers.py delete mode 100644 pygments/lexers/int_fiction.py delete mode 100644 pygments/lexers/iolang.py delete mode 100644 pygments/lexers/j.py delete mode 100644 pygments/lexers/javascript.py delete mode 100644 pygments/lexers/julia.py delete mode 100644 pygments/lexers/jvm.py delete mode 100644 pygments/lexers/lisp.py delete mode 100644 pygments/lexers/make.py delete mode 100644 pygments/lexers/markup.py delete mode 100644 pygments/lexers/math.py delete mode 100644 pygments/lexers/matlab.py delete mode 100644 pygments/lexers/ml.py delete mode 100644 pygments/lexers/modeling.py delete mode 100644 pygments/lexers/modula2.py delete mode 100644 pygments/lexers/monte.py delete mode 100644 pygments/lexers/ncl.py delete mode 100644 pygments/lexers/nimrod.py delete mode 100644 pygments/lexers/nit.py delete mode 100644 pygments/lexers/nix.py delete mode 100644 pygments/lexers/oberon.py delete mode 100644 pygments/lexers/objective.py delete mode 100644 pygments/lexers/ooc.py delete mode 100644 pygments/lexers/other.py delete mode 100644 pygments/lexers/parasail.py delete mode 100644 pygments/lexers/parsers.py delete mode 100644 pygments/lexers/pascal.py delete mode 100644 pygments/lexers/pawn.py delete mode 100644 pygments/lexers/perl.py delete mode 100644 pygments/lexers/php.py delete mode 100644 pygments/lexers/pony.py delete mode 100644 pygments/lexers/praat.py delete mode 100644 pygments/lexers/prolog.py delete mode 100644 pygments/lexers/python.py delete mode 100644 pygments/lexers/qvt.py delete mode 100644 pygments/lexers/r.py delete mode 100644 pygments/lexers/rdf.py delete mode 100644 pygments/lexers/rebol.py delete mode 100644 pygments/lexers/resource.py delete mode 100644 pygments/lexers/rnc.py delete mode 100644 pygments/lexers/roboconf.py delete mode 100644 pygments/lexers/robotframework.py delete mode 100644 pygments/lexers/ruby.py delete mode 100644 pygments/lexers/rust.py delete mode 100644 pygments/lexers/sas.py delete mode 100644 pygments/lexers/scripting.py delete mode 100644 pygments/lexers/sgf.py delete mode 100644 pygments/lexers/shell.py delete mode 100644 pygments/lexers/slash.py delete mode 
100644 pygments/lexers/smalltalk.py delete mode 100644 pygments/lexers/smv.py delete mode 100644 pygments/lexers/snobol.py delete mode 100644 pygments/lexers/special.py delete mode 100644 pygments/lexers/sql.py delete mode 100644 pygments/lexers/stata.py delete mode 100644 pygments/lexers/supercollider.py delete mode 100644 pygments/lexers/tcl.py delete mode 100644 pygments/lexers/templates.py delete mode 100644 pygments/lexers/teraterm.py delete mode 100644 pygments/lexers/testing.py delete mode 100644 pygments/lexers/text.py delete mode 100644 pygments/lexers/textedit.py delete mode 100644 pygments/lexers/textfmts.py delete mode 100644 pygments/lexers/theorem.py delete mode 100644 pygments/lexers/trafficscript.py delete mode 100644 pygments/lexers/typoscript.py delete mode 100644 pygments/lexers/unicon.py delete mode 100644 pygments/lexers/urbi.py delete mode 100644 pygments/lexers/varnish.py delete mode 100644 pygments/lexers/verification.py delete mode 100644 pygments/lexers/web.py delete mode 100644 pygments/lexers/webmisc.py delete mode 100644 pygments/lexers/whiley.py delete mode 100644 pygments/lexers/x10.py delete mode 100644 pygments/lexers/xorg.py delete mode 100644 pygments/lexers/zig.py delete mode 100644 pygments/modeline.py delete mode 100644 pygments/plugin.py delete mode 100644 pygments/regexopt.py delete mode 100644 pygments/scanner.py delete mode 100644 pygments/sphinxext.py delete mode 100644 pygments/style.py delete mode 100644 pygments/styles/__init__.py delete mode 100644 pygments/styles/abap.py delete mode 100644 pygments/styles/algol.py delete mode 100644 pygments/styles/algol_nu.py delete mode 100644 pygments/styles/arduino.py delete mode 100644 pygments/styles/autumn.py delete mode 100644 pygments/styles/borland.py delete mode 100644 pygments/styles/bw.py delete mode 100644 pygments/styles/colorful.py delete mode 100644 pygments/styles/default.py delete mode 100644 pygments/styles/emacs.py delete mode 100644 pygments/styles/friendly.py delete mode 100644 pygments/styles/fruity.py delete mode 100644 pygments/styles/igor.py delete mode 100644 pygments/styles/lovelace.py delete mode 100644 pygments/styles/manni.py delete mode 100644 pygments/styles/monokai.py delete mode 100644 pygments/styles/murphy.py delete mode 100644 pygments/styles/native.py delete mode 100644 pygments/styles/paraiso_dark.py delete mode 100644 pygments/styles/paraiso_light.py delete mode 100644 pygments/styles/pastie.py delete mode 100644 pygments/styles/perldoc.py delete mode 100644 pygments/styles/rainbow_dash.py delete mode 100644 pygments/styles/rrt.py delete mode 100644 pygments/styles/sas.py delete mode 100644 pygments/styles/solarized.py delete mode 100644 pygments/styles/stata_dark.py delete mode 100644 pygments/styles/stata_light.py delete mode 100644 pygments/styles/tango.py delete mode 100644 pygments/styles/trac.py delete mode 100644 pygments/styles/vim.py delete mode 100644 pygments/styles/vs.py delete mode 100644 pygments/styles/xcode.py delete mode 100644 pygments/token.py delete mode 100644 pygments/unistring.py delete mode 100644 pygments/util.py delete mode 100644 requirements.txt delete mode 100755 scripts/check_sources.py delete mode 100755 scripts/debug_lexer.py delete mode 100644 scripts/detect_missing_analyse_text.py delete mode 100644 scripts/epydoc.css delete mode 120000 scripts/find_error.py delete mode 100644 scripts/get_vimkw.py delete mode 100644 scripts/pylintrc delete mode 100644 scripts/release-checklist delete mode 100755 scripts/vim2pygments.py delete mode 
100644 setup.cfg delete mode 100755 setup.py delete mode 100644 tests/dtds/HTML4-f.dtd delete mode 100644 tests/dtds/HTML4-s.dtd delete mode 100644 tests/dtds/HTML4.dcl delete mode 100644 tests/dtds/HTML4.dtd delete mode 100644 tests/dtds/HTML4.soc delete mode 100644 tests/dtds/HTMLlat1.ent delete mode 100644 tests/dtds/HTMLspec.ent delete mode 100644 tests/dtds/HTMLsym.ent delete mode 100644 tests/examplefiles/99_bottles_of_beer.chpl delete mode 100644 tests/examplefiles/AcidStateAdvanced.hs delete mode 100644 tests/examplefiles/AlternatingGroup.mu delete mode 100644 tests/examplefiles/BOM.js delete mode 100644 tests/examplefiles/Blink.ino delete mode 100755 tests/examplefiles/CPDictionary.j delete mode 100644 tests/examplefiles/Charmci.ci delete mode 100644 tests/examplefiles/Config.in.cache delete mode 100644 tests/examplefiles/Constants.mo delete mode 100644 tests/examplefiles/DancingSudoku.lhs delete mode 100755 tests/examplefiles/Deflate.fs delete mode 100644 tests/examplefiles/Error.pmod delete mode 100644 tests/examplefiles/Errors.scala delete mode 100644 tests/examplefiles/FakeFile.pike delete mode 100644 tests/examplefiles/Get-CommandDefinitionHtml.ps1 delete mode 100644 tests/examplefiles/IPDispatchC.nc delete mode 100644 tests/examplefiles/IPDispatchP.nc delete mode 100644 tests/examplefiles/Intro.java delete mode 100644 tests/examplefiles/Makefile delete mode 100644 tests/examplefiles/Object.st delete mode 100644 tests/examplefiles/OrderedMap.hx delete mode 100644 tests/examplefiles/RoleQ.pm6 delete mode 100644 tests/examplefiles/SmallCheck.hs delete mode 100644 tests/examplefiles/Sorting.mod delete mode 100644 tests/examplefiles/StdGeneric.icl delete mode 100644 tests/examplefiles/Sudoku.lhs delete mode 100644 tests/examplefiles/abnf_example1.abnf delete mode 100644 tests/examplefiles/abnf_example2.abnf delete mode 100644 tests/examplefiles/addressbook.proto delete mode 100644 tests/examplefiles/ahcon.f delete mode 100644 tests/examplefiles/all.nit delete mode 100644 tests/examplefiles/antlr_ANTLRv3.g delete mode 100644 tests/examplefiles/antlr_throws delete mode 100644 tests/examplefiles/apache2.conf delete mode 100644 tests/examplefiles/as3_test.as delete mode 100644 tests/examplefiles/as3_test2.as delete mode 100644 tests/examplefiles/as3_test3.as delete mode 100644 tests/examplefiles/aspx-cs_example delete mode 100644 tests/examplefiles/autoit_submit.au3 delete mode 100644 tests/examplefiles/automake.mk delete mode 100644 tests/examplefiles/badcase.java delete mode 100644 tests/examplefiles/bigtest.nsi delete mode 100644 tests/examplefiles/bnf_example1.bnf delete mode 100644 tests/examplefiles/boot-9.scm delete mode 100644 tests/examplefiles/ca65_example delete mode 100644 tests/examplefiles/capdl_example.cdl delete mode 100644 tests/examplefiles/cbmbas_example delete mode 100644 tests/examplefiles/cells.ps delete mode 100644 tests/examplefiles/ceval.c delete mode 100644 tests/examplefiles/char.scala delete mode 100644 tests/examplefiles/cheetah_example.html delete mode 100644 tests/examplefiles/classes.dylan delete mode 100644 tests/examplefiles/clojure-weird-keywords.clj delete mode 100644 tests/examplefiles/condensed_ruby.rb delete mode 100644 tests/examplefiles/coq_RelationClasses delete mode 100644 tests/examplefiles/core.cljs delete mode 100644 tests/examplefiles/database.pytb delete mode 100644 tests/examplefiles/de.MoinMoin.po delete mode 100644 tests/examplefiles/demo.ahk delete mode 100644 tests/examplefiles/demo.cfm delete mode 100644 
tests/examplefiles/demo.css.in delete mode 100644 tests/examplefiles/demo.frt delete mode 100644 tests/examplefiles/demo.hbs delete mode 100644 tests/examplefiles/demo.js.in delete mode 100644 tests/examplefiles/demo.thrift delete mode 100644 tests/examplefiles/demo.xul.in delete mode 100644 tests/examplefiles/django_sample.html+django delete mode 100644 tests/examplefiles/docker.docker delete mode 100644 tests/examplefiles/durexmania.aheui delete mode 100644 tests/examplefiles/dwarf.cw delete mode 100644 tests/examplefiles/eg_example1.eg delete mode 100644 tests/examplefiles/ember.handlebars delete mode 100644 tests/examplefiles/erl_session delete mode 100644 tests/examplefiles/es6.js delete mode 100644 tests/examplefiles/escape_semicolon.clj delete mode 100644 tests/examplefiles/eval.rs delete mode 100644 tests/examplefiles/evil_regex.js delete mode 100644 tests/examplefiles/example.Rd delete mode 100644 tests/examplefiles/example.als delete mode 100644 tests/examplefiles/example.bat delete mode 100644 tests/examplefiles/example.bbc delete mode 100644 tests/examplefiles/example.bc delete mode 100644 tests/examplefiles/example.boa delete mode 100644 tests/examplefiles/example.bug delete mode 100644 tests/examplefiles/example.c delete mode 100644 tests/examplefiles/example.ceylon delete mode 100644 tests/examplefiles/example.chai delete mode 100644 tests/examplefiles/example.clay delete mode 100644 tests/examplefiles/example.cls delete mode 100644 tests/examplefiles/example.cob delete mode 100644 tests/examplefiles/example.coffee delete mode 100644 tests/examplefiles/example.cpp delete mode 100644 tests/examplefiles/example.e delete mode 100644 tests/examplefiles/example.elm delete mode 100644 tests/examplefiles/example.ezt delete mode 100644 tests/examplefiles/example.f90 delete mode 100644 tests/examplefiles/example.feature delete mode 100644 tests/examplefiles/example.fish delete mode 100644 tests/examplefiles/example.flo delete mode 100644 tests/examplefiles/example.gd delete mode 100644 tests/examplefiles/example.gi delete mode 100644 tests/examplefiles/example.golo delete mode 100755 tests/examplefiles/example.groovy delete mode 100644 tests/examplefiles/example.gs delete mode 100644 tests/examplefiles/example.gst delete mode 100644 tests/examplefiles/example.hlsl delete mode 100644 tests/examplefiles/example.hs delete mode 100644 tests/examplefiles/example.hx delete mode 100644 tests/examplefiles/example.i6t delete mode 100644 tests/examplefiles/example.i7x delete mode 100644 tests/examplefiles/example.icn delete mode 100644 tests/examplefiles/example.icon delete mode 100644 tests/examplefiles/example.j delete mode 100644 tests/examplefiles/example.jag delete mode 100644 tests/examplefiles/example.java delete mode 100644 tests/examplefiles/example.jcl delete mode 100644 tests/examplefiles/example.jsgf delete mode 100644 tests/examplefiles/example.jsonld delete mode 100644 tests/examplefiles/example.juttle delete mode 100644 tests/examplefiles/example.kal delete mode 100644 tests/examplefiles/example.kt delete mode 100644 tests/examplefiles/example.lagda delete mode 100644 tests/examplefiles/example.liquid delete mode 100644 tests/examplefiles/example.lua delete mode 100644 tests/examplefiles/example.ma delete mode 100644 tests/examplefiles/example.mac delete mode 100644 tests/examplefiles/example.md delete mode 100644 tests/examplefiles/example.monkey delete mode 100644 tests/examplefiles/example.moo delete mode 100644 tests/examplefiles/example.moon delete mode 100644 
tests/examplefiles/example.mq4 delete mode 100644 tests/examplefiles/example.mqh delete mode 100644 tests/examplefiles/example.msc delete mode 100644 tests/examplefiles/example.ng2 delete mode 100644 tests/examplefiles/example.ni delete mode 100644 tests/examplefiles/example.nim delete mode 100644 tests/examplefiles/example.nix delete mode 100644 tests/examplefiles/example.ns2 delete mode 100644 tests/examplefiles/example.pas delete mode 100644 tests/examplefiles/example.pcmk delete mode 100644 tests/examplefiles/example.pony delete mode 100644 tests/examplefiles/example.pp delete mode 100644 tests/examplefiles/example.praat delete mode 100644 tests/examplefiles/example.prg delete mode 100644 tests/examplefiles/example.rb delete mode 100644 tests/examplefiles/example.red delete mode 100644 tests/examplefiles/example.reds delete mode 100644 tests/examplefiles/example.reg delete mode 100644 tests/examplefiles/example.rexx delete mode 100644 tests/examplefiles/example.rhtml delete mode 100644 tests/examplefiles/example.rkt delete mode 100644 tests/examplefiles/example.rpf delete mode 100644 tests/examplefiles/example.rts delete mode 100644 tests/examplefiles/example.sbl delete mode 100644 tests/examplefiles/example.scd delete mode 100644 tests/examplefiles/example.sgf delete mode 100644 tests/examplefiles/example.sh delete mode 100644 tests/examplefiles/example.sh-session delete mode 100644 tests/examplefiles/example.shell-session delete mode 100644 tests/examplefiles/example.shex delete mode 100644 tests/examplefiles/example.sl delete mode 100644 tests/examplefiles/example.slim delete mode 100644 tests/examplefiles/example.sls delete mode 100644 tests/examplefiles/example.sml delete mode 100644 tests/examplefiles/example.snobol delete mode 100644 tests/examplefiles/example.stan delete mode 100644 tests/examplefiles/example.tap delete mode 100644 tests/examplefiles/example.tasm delete mode 100644 tests/examplefiles/example.tea delete mode 100644 tests/examplefiles/example.tf delete mode 100644 tests/examplefiles/example.thy delete mode 100644 tests/examplefiles/example.todotxt delete mode 100644 tests/examplefiles/example.toml delete mode 100644 tests/examplefiles/example.ttl delete mode 100644 tests/examplefiles/example.u delete mode 100644 tests/examplefiles/example.u1 delete mode 100644 tests/examplefiles/example.vbs delete mode 100644 tests/examplefiles/example.weechatlog delete mode 100644 tests/examplefiles/example.whiley delete mode 100644 tests/examplefiles/example.x10 delete mode 100644 tests/examplefiles/example.xhtml delete mode 100644 tests/examplefiles/example.xtend delete mode 100644 tests/examplefiles/example.xtm delete mode 100644 tests/examplefiles/example.yaml delete mode 100644 tests/examplefiles/example.zig delete mode 100644 tests/examplefiles/example1.cadl delete mode 100644 tests/examplefiles/example2.aspx delete mode 100644 tests/examplefiles/example2.cpp delete mode 100644 tests/examplefiles/example2.msc delete mode 100644 tests/examplefiles/exampleScript.cfc delete mode 100644 tests/examplefiles/exampleTag.cfc delete mode 100644 tests/examplefiles/example_coq.v delete mode 100644 tests/examplefiles/example_elixir.ex delete mode 100644 tests/examplefiles/example_file.fy delete mode 100644 tests/examplefiles/ezhil_primefactors.n delete mode 100644 tests/examplefiles/fennelview.fnl delete mode 100644 tests/examplefiles/fibonacci.tokigun.aheui delete mode 100644 tests/examplefiles/firefox.mak delete mode 100644 tests/examplefiles/flatline_example delete mode 100644 
tests/examplefiles/flipflop.sv delete mode 100644 tests/examplefiles/foo.sce delete mode 100644 tests/examplefiles/format.ml delete mode 100644 tests/examplefiles/freefem.edp delete mode 100644 tests/examplefiles/fucked_up.rb delete mode 100644 tests/examplefiles/function.mu delete mode 100644 tests/examplefiles/functional.rst delete mode 100644 tests/examplefiles/garcia-wachs.kk delete mode 100644 tests/examplefiles/genclass.clj delete mode 100644 tests/examplefiles/genshi_example.xml+genshi delete mode 100644 tests/examplefiles/genshitext_example.genshitext delete mode 100644 tests/examplefiles/glsl.frag delete mode 100644 tests/examplefiles/glsl.vert delete mode 100644 tests/examplefiles/grammar-test.p6 delete mode 100644 tests/examplefiles/guidance.smv delete mode 100644 tests/examplefiles/hash_syntax.rb delete mode 100644 tests/examplefiles/hello-world.puzzlet.aheui delete mode 100644 tests/examplefiles/hello.at delete mode 100644 tests/examplefiles/hello.golo delete mode 100644 tests/examplefiles/hello.lsl delete mode 100644 tests/examplefiles/hello.smali delete mode 100644 tests/examplefiles/hello.sp delete mode 100644 tests/examplefiles/hexdump_debugexe delete mode 100644 tests/examplefiles/hexdump_hd delete mode 100644 tests/examplefiles/hexdump_hexcat delete mode 100644 tests/examplefiles/hexdump_hexdump delete mode 100644 tests/examplefiles/hexdump_od delete mode 100644 tests/examplefiles/hexdump_xxd delete mode 100644 tests/examplefiles/html+php_faulty.php delete mode 100644 tests/examplefiles/http_request_example delete mode 100644 tests/examplefiles/http_response_example delete mode 100644 tests/examplefiles/hybris_File.hy delete mode 100644 tests/examplefiles/idl_sample.pro delete mode 100644 tests/examplefiles/iex_example delete mode 100644 tests/examplefiles/inet_pton6.dg delete mode 100644 tests/examplefiles/inform6_example delete mode 100644 tests/examplefiles/interp.scala delete mode 100644 tests/examplefiles/intro.ik delete mode 100644 tests/examplefiles/ints.php delete mode 100644 tests/examplefiles/intsyn.fun delete mode 100644 tests/examplefiles/intsyn.sig delete mode 100644 tests/examplefiles/irb_heredoc delete mode 100755 tests/examplefiles/irc.lsp delete mode 100644 tests/examplefiles/java.properties delete mode 100644 tests/examplefiles/jbst_example1.jbst delete mode 100644 tests/examplefiles/jbst_example2.jbst delete mode 100644 tests/examplefiles/jinjadesignerdoc.rst delete mode 100644 tests/examplefiles/json.lasso delete mode 100644 tests/examplefiles/json.lasso9 delete mode 100644 tests/examplefiles/language.hy delete mode 100644 tests/examplefiles/lighttpd_config.conf delete mode 100644 tests/examplefiles/limbo.b delete mode 100644 tests/examplefiles/linecontinuation.py delete mode 100644 tests/examplefiles/livescript-demo.ls delete mode 100644 tests/examplefiles/logos_example.xm delete mode 100644 tests/examplefiles/ltmain.sh delete mode 100644 tests/examplefiles/main.cmake delete mode 100755 tests/examplefiles/markdown.lsp delete mode 100644 tests/examplefiles/matlab_noreturn delete mode 100644 tests/examplefiles/matlab_sample delete mode 100644 tests/examplefiles/matlabsession_sample.txt delete mode 100644 tests/examplefiles/metagrammar.treetop delete mode 100644 tests/examplefiles/minehunt.qml delete mode 100644 tests/examplefiles/minimal.ns2 delete mode 100644 tests/examplefiles/modula2_test_cases.def delete mode 100644 tests/examplefiles/moin_SyntaxReference.txt delete mode 100644 tests/examplefiles/multiline_regexes.rb delete mode 100644 
tests/examplefiles/nanomsg.intr delete mode 100644 tests/examplefiles/nasm_aoutso.asm delete mode 100644 tests/examplefiles/nasm_objexe.asm delete mode 100644 tests/examplefiles/nemerle_sample.n delete mode 100644 tests/examplefiles/nginx_nginx.conf delete mode 100644 tests/examplefiles/noexcept.cpp delete mode 100644 tests/examplefiles/numbers.c delete mode 100644 tests/examplefiles/objc_example.m delete mode 100644 tests/examplefiles/openedge_example delete mode 100644 tests/examplefiles/pacman.conf delete mode 100644 tests/examplefiles/pacman.ijs delete mode 100644 tests/examplefiles/pawn_example delete mode 100644 tests/examplefiles/perl_misc delete mode 100644 tests/examplefiles/perl_perl5db delete mode 100644 tests/examplefiles/perl_regex-delims delete mode 100644 tests/examplefiles/perlfunc.1 delete mode 100644 tests/examplefiles/phpMyAdmin.spec delete mode 100644 tests/examplefiles/phpcomplete.vim delete mode 100644 tests/examplefiles/pkgconfig_example.pc delete mode 100644 tests/examplefiles/plain.bst delete mode 100644 tests/examplefiles/pleac.in.rb delete mode 100644 tests/examplefiles/postgresql_test.txt delete mode 100644 tests/examplefiles/pppoe.applescript delete mode 100644 tests/examplefiles/psql_session.txt delete mode 100644 tests/examplefiles/py3_test.txt delete mode 100644 tests/examplefiles/py3tb_test.py3tb delete mode 100644 tests/examplefiles/pycon_ctrlc_traceback delete mode 100644 tests/examplefiles/pycon_test.pycon delete mode 100644 tests/examplefiles/pytb_test2.pytb delete mode 100644 tests/examplefiles/pytb_test3.pytb delete mode 100644 tests/examplefiles/python25-bsd.mak delete mode 100644 tests/examplefiles/qbasic_example delete mode 100644 tests/examplefiles/qsort.prolog delete mode 100644 tests/examplefiles/r-console-transcript.Rout delete mode 100644 tests/examplefiles/r6rs-comments.scm delete mode 100644 tests/examplefiles/ragel-cpp_rlscan delete mode 100644 tests/examplefiles/ragel-cpp_snippet delete mode 100644 tests/examplefiles/regex.js delete mode 100644 tests/examplefiles/resourcebundle_demo delete mode 100644 tests/examplefiles/reversi.lsp delete mode 100644 tests/examplefiles/rnc_example.rnc delete mode 100644 tests/examplefiles/roboconf.graph delete mode 100644 tests/examplefiles/roboconf.instances delete mode 100644 tests/examplefiles/robotframework_test.txt delete mode 100644 tests/examplefiles/rql-queries.rql delete mode 100644 tests/examplefiles/ruby_func_def.rb delete mode 100644 tests/examplefiles/sample.qvto delete mode 100644 tests/examplefiles/scilab.sci delete mode 100644 tests/examplefiles/scope.cirru delete mode 100644 tests/examplefiles/session.dylan-console delete mode 100644 tests/examplefiles/sibling.prolog delete mode 100644 tests/examplefiles/simple.camkes delete mode 100644 tests/examplefiles/simple.croc delete mode 100644 tests/examplefiles/smarty_example.html delete mode 100644 tests/examplefiles/source.lgt delete mode 100644 tests/examplefiles/sources.list delete mode 100644 tests/examplefiles/sparql.rq delete mode 100644 tests/examplefiles/sphere.pov delete mode 100644 tests/examplefiles/sqlite3.sqlite3-console delete mode 100644 tests/examplefiles/squid.conf delete mode 100644 tests/examplefiles/string.jl delete mode 100644 tests/examplefiles/string_delimiters.d delete mode 100644 tests/examplefiles/stripheredoc.sh delete mode 100644 tests/examplefiles/subr.el delete mode 100644 tests/examplefiles/swig_java.swg delete mode 100644 tests/examplefiles/swig_std_vector.i delete mode 100644 tests/examplefiles/tads3_example.t 
delete mode 100644 tests/examplefiles/teraterm.ttl delete mode 100644 tests/examplefiles/termcap delete mode 100644 tests/examplefiles/terminfo delete mode 100644 tests/examplefiles/test-3.0.xq delete mode 100644 tests/examplefiles/test-exist-update.xq delete mode 100644 tests/examplefiles/test.R delete mode 100644 tests/examplefiles/test.adb delete mode 100644 tests/examplefiles/test.adls delete mode 100644 tests/examplefiles/test.agda delete mode 100644 tests/examplefiles/test.apl delete mode 100644 tests/examplefiles/test.asy delete mode 100644 tests/examplefiles/test.awk delete mode 100644 tests/examplefiles/test.bb delete mode 100644 tests/examplefiles/test.bib delete mode 100644 tests/examplefiles/test.bmx delete mode 100644 tests/examplefiles/test.boo delete mode 100644 tests/examplefiles/test.bpl delete mode 100644 tests/examplefiles/test.bro delete mode 100644 tests/examplefiles/test.cadl delete mode 100644 tests/examplefiles/test.cr delete mode 100644 tests/examplefiles/test.cs delete mode 100644 tests/examplefiles/test.csd delete mode 100644 tests/examplefiles/test.css delete mode 100644 tests/examplefiles/test.cu delete mode 100644 tests/examplefiles/test.cyp delete mode 100644 tests/examplefiles/test.d delete mode 100644 tests/examplefiles/test.dart delete mode 100644 tests/examplefiles/test.dtd delete mode 100644 tests/examplefiles/test.ebnf delete mode 100644 tests/examplefiles/test.ec delete mode 100644 tests/examplefiles/test.eh delete mode 100644 tests/examplefiles/test.erl delete mode 100644 tests/examplefiles/test.escript delete mode 100644 tests/examplefiles/test.evoque delete mode 100755 tests/examplefiles/test.fan delete mode 100644 tests/examplefiles/test.flx delete mode 100644 tests/examplefiles/test.gdc delete mode 100644 tests/examplefiles/test.gradle delete mode 100644 tests/examplefiles/test.groovy delete mode 100644 tests/examplefiles/test.hsail delete mode 100644 tests/examplefiles/test.html delete mode 100644 tests/examplefiles/test.idr delete mode 100644 tests/examplefiles/test.ini delete mode 100644 tests/examplefiles/test.java delete mode 100644 tests/examplefiles/test.jsp delete mode 100644 tests/examplefiles/test.lean delete mode 100644 tests/examplefiles/test.maql delete mode 100644 tests/examplefiles/test.mask delete mode 100644 tests/examplefiles/test.mod delete mode 100644 tests/examplefiles/test.moo delete mode 100644 tests/examplefiles/test.mt delete mode 100644 tests/examplefiles/test.myt delete mode 100644 tests/examplefiles/test.ncl delete mode 100644 tests/examplefiles/test.nim delete mode 100644 tests/examplefiles/test.odin delete mode 100644 tests/examplefiles/test.opa delete mode 100644 tests/examplefiles/test.orc delete mode 100644 tests/examplefiles/test.p6 delete mode 100644 tests/examplefiles/test.pan delete mode 100644 tests/examplefiles/test.pas delete mode 100644 tests/examplefiles/test.php delete mode 100644 tests/examplefiles/test.pig delete mode 100644 tests/examplefiles/test.plot delete mode 100644 tests/examplefiles/test.ps1 delete mode 100644 tests/examplefiles/test.psl delete mode 100644 tests/examplefiles/test.pwn delete mode 100644 tests/examplefiles/test.pypylog delete mode 100644 tests/examplefiles/test.r3 delete mode 100644 tests/examplefiles/test.rb delete mode 100644 tests/examplefiles/test.rhtml delete mode 100644 tests/examplefiles/test.rsl delete mode 100644 tests/examplefiles/test.scaml delete mode 100644 tests/examplefiles/test.sco delete mode 100644 tests/examplefiles/test.shen delete mode 100644 
tests/examplefiles/test.sil delete mode 100644 tests/examplefiles/test.ssp delete mode 100644 tests/examplefiles/test.swift delete mode 100644 tests/examplefiles/test.tcsh delete mode 100644 tests/examplefiles/test.vb delete mode 100644 tests/examplefiles/test.vhdl delete mode 100644 tests/examplefiles/test.xqy delete mode 100644 tests/examplefiles/test.xsl delete mode 100644 tests/examplefiles/test.zep delete mode 100644 tests/examplefiles/test2.odin delete mode 100644 tests/examplefiles/test2.pypylog delete mode 100644 tests/examplefiles/test_basic.adls delete mode 100644 tests/examplefiles/truncated.pytb delete mode 100644 tests/examplefiles/tsql_example.sql delete mode 100644 tests/examplefiles/twig_test delete mode 100644 tests/examplefiles/type.lisp delete mode 100644 tests/examplefiles/typescript_example delete mode 100644 tests/examplefiles/typoscript_example delete mode 100644 tests/examplefiles/underscore.coffee delete mode 100644 tests/examplefiles/unicode.applescript delete mode 100644 tests/examplefiles/unicode.go delete mode 100644 tests/examplefiles/unicode.js delete mode 100644 tests/examplefiles/unicodedoc.py delete mode 100644 tests/examplefiles/unix-io.lid delete mode 100644 tests/examplefiles/varnish.vcl delete mode 100644 tests/examplefiles/vbnet_test.bas delete mode 100644 tests/examplefiles/vctreestatus_hg delete mode 100644 tests/examplefiles/vimrc delete mode 100644 tests/examplefiles/vpath.mk delete mode 100644 tests/examplefiles/wdiff_example1.wdiff delete mode 100644 tests/examplefiles/wdiff_example3.wdiff delete mode 100644 tests/examplefiles/webkit-transition.css delete mode 100644 tests/examplefiles/while.pov delete mode 100644 tests/examplefiles/wiki.factor delete mode 100644 tests/examplefiles/xml_example delete mode 100644 tests/examplefiles/xorg.conf delete mode 100644 tests/examplefiles/yahalom.cpsa delete mode 100644 tests/examplefiles/zmlrpc.f90 delete mode 100644 tests/run.py delete mode 100644 tests/string_asserts.py delete mode 100644 tests/support.py delete mode 100644 tests/support/empty.py delete mode 100644 tests/support/html_formatter.py delete mode 100644 tests/support/python_lexer.py delete mode 100644 tests/support/tags delete mode 100644 tests/test_asm.py delete mode 100644 tests/test_basic.py delete mode 100644 tests/test_basic_api.py delete mode 100644 tests/test_bibtex.py delete mode 100644 tests/test_cfm.py delete mode 100644 tests/test_clexer.py delete mode 100644 tests/test_cmdline.py delete mode 100644 tests/test_cpp.py delete mode 100644 tests/test_crystal.py delete mode 100644 tests/test_csound.py delete mode 100644 tests/test_data.py delete mode 100644 tests/test_examplefiles.py delete mode 100644 tests/test_ezhil.py delete mode 100644 tests/test_html_formatter.py delete mode 100644 tests/test_inherit.py delete mode 100644 tests/test_irc_formatter.py delete mode 100644 tests/test_java.py delete mode 100644 tests/test_javascript.py delete mode 100644 tests/test_julia.py delete mode 100644 tests/test_kotlin.py delete mode 100644 tests/test_latex_formatter.py delete mode 100644 tests/test_lexers_other.py delete mode 100644 tests/test_markdown_lexer.py delete mode 100644 tests/test_modeline.py delete mode 100644 tests/test_objectiveclexer.py delete mode 100644 tests/test_perllexer.py delete mode 100644 tests/test_php.py delete mode 100644 tests/test_praat.py delete mode 100644 tests/test_properties.py delete mode 100644 tests/test_python.py delete mode 100644 tests/test_qbasiclexer.py delete mode 100644 tests/test_r.py delete mode 
100644 tests/test_regexlexer.py delete mode 100644 tests/test_regexopt.py delete mode 100644 tests/test_rtf_formatter.py delete mode 100644 tests/test_ruby.py delete mode 100644 tests/test_shell.py delete mode 100644 tests/test_smarty.py delete mode 100644 tests/test_sql.py delete mode 100644 tests/test_string_asserts.py delete mode 100644 tests/test_terminal_formatter.py delete mode 100644 tests/test_textfmts.py delete mode 100644 tests/test_token.py delete mode 100644 tests/test_unistring.py delete mode 100644 tests/test_using_api.py delete mode 100644 tests/test_util.py delete mode 100644 tests/test_whiley.py delete mode 100644 tox.ini diff --git a/AUTHORS b/AUTHORS deleted file mode 100644 index f7a7acad..00000000 --- a/AUTHORS +++ /dev/null @@ -1,228 +0,0 @@ -Pygments is written and maintained by Georg Brandl . - -Major developers are Tim Hatch and Armin Ronacher -. - -Other contributors, listed alphabetically, are: - -* Sam Aaron -- Ioke lexer -* Ali Afshar -- image formatter -* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript - lexers -* Muthiah Annamalai -- Ezhil lexer -* Kumar Appaiah -- Debian control lexer -* Andreas Amann -- AppleScript lexer -* Timothy Armstrong -- Dart lexer fixes -* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers -* Jeremy Ashkenas -- CoffeeScript lexer -* José Joaquín Atria -- Praat lexer -* Stefan Matthias Aust -- Smalltalk lexer -* Lucas Bajolet -- Nit lexer -* Ben Bangert -- Mako lexers -* Max Battcher -- Darcs patch lexer -* Thomas Baruchel -- APL lexer -* Tim Baumann -- (Literate) Agda lexer -* Paul Baumgart, 280 North, Inc. -- Objective-J lexer -* Michael Bayer -- Myghty lexers -* Thomas Beale -- Archetype lexers -* John Benediktsson -- Factor lexer -* Trevor Bergeron -- mIRC formatter -* Vincent Bernat -- LessCSS lexer -* Christopher Bertels -- Fancy lexer -* Sébastien Bigaret -- QVT Operational lexer -* Jarrett Billingsley -- MiniD lexer -* Adam Blinkinsop -- Haskell, Redcode lexers -* Stéphane Blondon -- SGF lexer -* Frits van Bommel -- assembler lexers -* Pierre Bourdon -- bugfixes -* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter -* chebee7i -- Python traceback lexer improvements -* Hiram Chirino -- Scaml and Jade lexers -* Mauricio Caceres -- SAS and Stata lexers. -* Ian Cooper -- VGL lexer -* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers -* Leaf Corcoran -- MoonScript lexer -* Christopher Creutzig -- MuPAD lexer -* Daniël W. Crompton -- Pike lexer -* Pete Curry -- bugfixes -* Bryan Davis -- EBNF lexer -* Bruno Deferrari -- Shen lexer -* Giedrius Dubinskas -- HTML formatter improvements -* Owen Durni -- Haxe lexer -* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer -* James Edwards -- Terraform lexer -* Nick Efford -- Python 3 lexer -* Sven Efftinge -- Xtend lexer -* Artem Egorkine -- terminal256 formatter -* Matthew Fernandez -- CAmkES lexer -* Michael Ficarra -- CPSA lexer -* James H. Fisher -- PostScript lexer -* William S. 
Fulton -- SWIG lexer -* Carlos Galdino -- Elixir and Elixir Console lexers -* Michael Galloy -- IDL lexer -* Naveen Garg -- Autohotkey lexer -* Laurent Gautier -- R/S lexer -* Alex Gaynor -- PyPy log lexer -* Richard Gerkin -- Igor Pro lexer -* Alain Gilbert -- TypeScript lexer -* Alex Gilding -- BlitzBasic lexer -* Bertrand Goetzmann -- Groovy lexer -* Krzysiek Goj -- Scala lexer -* Andrey Golovizin -- BibTeX lexers -* Matt Good -- Genshi, Cheetah lexers -* Michał Górny -- vim modeline support -* Alex Gosse -- TrafficScript lexer -* Patrick Gotthardt -- PHP namespaces support -* Olivier Guibe -- Asymptote lexer -* Phil Hagelberg -- Fennel lexer -* Florian Hahn -- Boogie lexer -* Martin Harriman -- SNOBOL lexer -* Matthew Harrison -- SVG formatter -* Steven Hazel -- Tcl lexer -* Dan Michael Heggø -- Turtle lexer -* Aslak Hellesøy -- Gherkin lexer -* Greg Hendershott -- Racket lexer -* Justin Hendrick -- ParaSail lexer -* Jordi Gutiérrez Hermoso -- Octave lexer -* David Hess, Fish Software, Inc. -- Objective-J lexer -* Varun Hiremath -- Debian control lexer -* Rob Hoelz -- Perl 6 lexer -* Doug Hogan -- Mscgen lexer -* Ben Hollis -- Mason lexer -* Max Horn -- GAP lexer -* Alastair Houghton -- Lexer inheritance facility -* Tim Howard -- BlitzMax lexer -* Dustin Howett -- Logos lexer -* Ivan Inozemtsev -- Fantom lexer -* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session, - MSDOS session, BC, WDiff -* Brian R. Jackson -- Tea lexer -* Christian Jann -- ShellSession lexer -* Dennis Kaarsemaker -- sources.list lexer -* Dmitri Kabak -- Inferno Limbo lexer -* Igor Kalnitsky -- vhdl lexer -* Alexander Kit -- MaskJS lexer -* Pekka Klärck -- Robot Framework lexer -* Gerwin Klein -- Isabelle lexer -* Eric Knibbe -- Lasso lexer -* Stepan Koltsov -- Clay lexer -* Adam Koprowski -- Opa lexer -* Benjamin Kowarsch -- Modula-2 lexer -* Domen Kožar -- Nix lexer -* Oleh Krekel -- Emacs Lisp lexer -* Alexander Kriegisch -- Kconfig and AspectJ lexers -* Marek Kubica -- Scheme lexer -* Jochen Kupperschmidt -- Markdown processor -* Gerd Kurzbach -- Modelica lexer -* Jon Larimer, Google Inc. -- Smali lexer -* Olov Lassus -- Dart lexer -* Matt Layman -- TAP lexer -* Kristian Lyngstøl -- Varnish lexers -* Sylvestre Ledru -- Scilab lexer -* Chee Sing Lee -- Flatline lexer -* Mark Lee -- Vala lexer -* Valentin Lorentz -- C++ lexer improvements -* Ben Mabey -- Gherkin lexer -* Angus MacArthur -- QML lexer -* Louis Mandel -- X10 lexer -* Louis Marchand -- Eiffel lexer -* Simone Margaritelli -- Hybris lexer -* Kirk McDonald -- D lexer -* Gordon McGregor -- SystemVerilog lexer -* Stephen McKamey -- Duel/JBST lexer -* Brian McKenna -- F# lexer -* Charles McLaughlin -- Puppet lexer -* Kurt McKee -- Tera Term macro lexer -* Lukas Meuser -- BBCode formatter, Lua lexer -* Cat Miller -- Pig lexer -* Paul Miller -- LiveScript lexer -* Hong Minhee -- HTTP lexer -* Michael Mior -- Awk lexer -* Bruce Mitchener -- Dylan lexer rewrite -* Reuben Morais -- SourcePawn lexer -* Jon Morton -- Rust lexer -* Paulo Moura -- Logtalk lexer -* Mher Movsisyan -- DTD lexer -* Dejan Muhamedagic -- Crmsh lexer -* Ana Nelson -- Ragel, ANTLR, R console lexers -* Kurt Neufeld -- Markdown lexer -* Nam T. 
Nguyen -- Monokai style -* Jesper Noehr -- HTML formatter "anchorlinenos" -* Mike Nolta -- Julia lexer -* Jonas Obrist -- BBCode lexer -* Edward O'Callaghan -- Cryptol lexer -* David Oliva -- Rebol lexer -* Pat Pannuto -- nesC lexer -* Jon Parise -- Protocol buffers and Thrift lexers -* Benjamin Peterson -- Test suite refactoring -* Ronny Pfannschmidt -- BBCode lexer -* Dominik Picheta -- Nimrod lexer -* Andrew Pinkham -- RTF Formatter Refactoring -* Clément Prévost -- UrbiScript lexer -* Tanner Prynn -- cmdline -x option and loading lexers from files -* Oleh Prypin -- Crystal lexer (based on Ruby lexer) -* Elias Rabel -- Fortran fixed form lexer -* raichoo -- Idris lexer -* Kashif Rasul -- CUDA lexer -* Nathan Reed -- HLSL lexer -* Justin Reidy -- MXML lexer -* Norman Richards -- JSON lexer -* Corey Richardson -- Rust lexer updates -* Lubomir Rintel -- GoodData MAQL and CL lexers -* Andre Roberge -- Tango style -* Georg Rollinger -- HSAIL lexer -* Michiel Roos -- TypoScript lexer -* Konrad Rudolph -- LaTeX formatter enhancements -* Mario Ruggier -- Evoque lexers -* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements -* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers -* Matteo Sasso -- Common Lisp lexer -* Joe Schafer -- Ada lexer -* Ken Schutte -- Matlab lexers -* René Schwaiger -- Rainbow Dash style -* Sebastian Schweizer -- Whiley lexer -* Tassilo Schweyer -- Io, MOOCode lexers -* Ted Shaw -- AutoIt lexer -* Joerg Sieker -- ABAP lexer -* Robert Simmons -- Standard ML lexer -* Kirill Simonov -- YAML lexer -* Corbin Simpson -- Monte lexer -* Alexander Smishlajev -- Visual FoxPro lexer -* Steve Spigarelli -- XQuery lexer -* Jerome St-Louis -- eC lexer -* Camil Staps -- Clean and NuSMV lexers; Solarized style -* James Strachan -- Kotlin lexer -* Tom Stuart -- Treetop lexer -* Colin Sullivan -- SuperCollider lexer -* Ben Swift -- Extempore lexer -* Edoardo Tenani -- Arduino lexer -* Tiberius Teng -- default style overhaul -* Jeremy Thurgood -- Erlang, Squid config lexers -* Brian Tiffin -- OpenCOBOL lexer -* Bob Tolbert -- Hy lexer -* Matthias Trute -- Forth lexer -* Erick Tryzelaar -- Felix lexer -* Alexander Udalov -- Kotlin lexer improvements -* Thomas Van Doren -- Chapel lexer -* Daniele Varrazzo -- PostgreSQL lexers -* Abe Voelker -- OpenEdge ABL lexer -* Pepijn de Vos -- HTML formatter CTags support -* Matthias Vallentin -- Bro lexer -* Benoît Vinot -- AMPL lexer -* Linh Vu Hong -- RSL lexer -* Nathan Weizenbaum -- Haml and Sass lexers -* Nathan Whetsell -- Csound lexers -* Dietmar Winkler -- Modelica lexer -* Nils Winter -- Smalltalk lexer -* Davy Wybiral -- Clojure lexer -* Whitney Young -- ObjectiveC lexer -* Diego Zamboni -- CFengine3 lexer -* Enrique Zamudio -- Ceylon lexer -* Alex Zimin -- Nemerle lexer -* Rob Zimmerman -- Kal lexer -* Vincent Zurczak -- Roboconf lexer -* Rostyslav Golda -- FloScript lexer -* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers -* Simon Garnotel -- FreeFem++ lexer - -Many thanks for all contributions! diff --git a/CHANGES b/CHANGES deleted file mode 100644 index 4d87e121..00000000 --- a/CHANGES +++ /dev/null @@ -1,1325 +0,0 @@ -Pygments changelog -================== - -Issue numbers refer to the tracker at -, -pull request numbers to the requests at -. 
- -Version 2.5.0 -------------- - -- Added lexers: - - * Erlang, Elxir shells (PR#823, #1521) - * Zig (PR#820) - -- Updated lexers: - - * Handlebars (PR#773) - * YAML (#1528) - -- Bump ``NasmLexer`` priority over ``TasmLexer`` for ``.asm`` files - (fixes #1326) - - -Version 2.4.2 -------------- -(released May 28, 2019) - -- Fix encoding error when guessing lexer with given ``encoding`` option - (#1438) - - -Version 2.4.1 -------------- -(released May 24, 2019) - -- Updated lexers: - - * Coq (#1430) - * MSDOS Session (PR#734) - * NASM (#1517) - * Objective-C (PR#813, #1508) - * Prolog (#1511) - * TypeScript (#1515) - -- Support CSS variables in stylesheets (PR#814, #1356) -- Fix F# lexer name (PR#709) -- Fix ``TerminalFormatter`` using bold for bright text (#1480) - - -Version 2.4.0 -------------- -(released May 8, 2019) - -- Added lexers: - - * Augeas (PR#807) - * BBC Basic (PR#806) - * Boa (PR#756) - * Charm++ CI (PR#788) - * DASM16 (PR#807) - * FloScript (PR#750) - * FreeFem++ (PR#785) - * Hspec (PR#790) - * Pony (PR#627) - * SGF (PR#780) - * Slash (PR#807) - * Slurm (PR#760) - * Tera Term Language (PR#749) - * TOML (PR#807) - * Unicon (PR#731) - * VBScript (PR#673) - -- Updated lexers: - - * Apache2 (PR#766) - * Cypher (PR#746) - * LLVM (PR#792) - * Makefiles (PR#766) - * PHP (#1482) - * Rust - * SQL (PR#672) - * Stan (PR#774) - * Stata (PR#800) - * Terraform (PR#787) - * YAML - -- Add solarized style (PR#708) -- Add support for Markdown reference-style links (PR#753) -- Add license information to generated HTML/CSS files (#1496) -- Change ANSI color names (PR#777) -- Fix catastrophic backtracking in the bash lexer (#1494) -- Fix documentation failing to build using Sphinx 2.0 (#1501) -- Fix incorrect links in the Lisp and R lexer documentation (PR#775) -- Fix rare unicode errors on Python 2.7 (PR#798, #1492) -- Fix lexers popping from an empty stack (#1506) -- TypoScript uses ``.typoscript`` now (#1498) -- Updated Trove classifiers and ``pip`` requirements (PR#799) - - -Version 2.3.1 -------------- -(released Dec 16, 2018) - -- Updated lexers: - - * ASM (PR#784) - * Chapel (PR#735) - * Clean (PR#621) - * CSound (PR#684) - * Elm (PR#744) - * Fortran (PR#747) - * GLSL (PR#740) - * Haskell (PR#745) - * Hy (PR#754) - * Igor Pro (PR#764) - * PowerShell (PR#705) - * Python (PR#720, #1299, PR#715) - * SLexer (PR#680) - * YAML (PR#762, PR#724) - -- Fix invalid string escape sequences -- Fix `FutureWarning` introduced by regex changes in Python 3.7 - - -Version 2.3.0 -------------- -(released Nov 25, 2018) - -- Added lexers: - - * Fennel (PR#783) - * HLSL (PR#675) - -- Updated lexers: - - * Dockerfile (PR#714) - -- Minimum Python versions changed to 2.7 and 3.5 -- Added support for Python 3.7 generator changes (PR#772) -- Fix incorrect token type in SCSS for single-quote strings (#1322) -- Use `terminal256` formatter if `TERM` contains `256` (PR#666) -- Fix incorrect handling of GitHub style fences in Markdown (PR#741, #1389) -- Fix `%a` not being highlighted in Python3 strings (PR#727) - - -Version 2.2.0 -------------- -(released Jan 22, 2017) - -- Added lexers: - - * AMPL - * TypoScript (#1173) - * Varnish config (PR#554) - * Clean (PR#503) - * WDiff (PR#513) - * Flatline (PR#551) - * Silver (PR#537) - * HSAIL (PR#518) - * JSGF (PR#546) - * NCAR command language (PR#536) - * Extempore (PR#530) - * Cap'n Proto (PR#595) - * Whiley (PR#573) - * Monte (PR#592) - * Crystal (PR#576) - * Snowball (PR#589) - * CapDL (PR#579) - * NuSMV (PR#564) - * SAS, Stata (PR#593) - -- Added the ability to load 
lexer and formatter classes directly from files - with the `-x` command line option and the `lexers.load_lexer_from_file()` - and `formatters.load_formatter_from_file()` functions. (PR#559) - -- Added `lexers.find_lexer_class_by_name()`. (#1203) - -- Added new token types and lexing for magic methods and variables in Python - and PHP. - -- Added a new token type for string affixes and lexing for them in Python, C++ - and Postgresql lexers. - -- Added a new token type for heredoc (and similar) string delimiters and - lexing for them in C++, Perl, PHP, Postgresql and Ruby lexers. - -- Styles can now define colors with ANSI colors for use in the 256-color - terminal formatter. (PR#531) - -- Improved the CSS lexer. (#1083, #1130) - -- Added "Rainbow Dash" style. (PR#623) - -- Delay loading `pkg_resources`, which takes a long while to import. (PR#690) - - -Version 2.1.3 -------------- -(released Mar 2, 2016) - -- Fixed regression in Bash lexer (PR#563) - - -Version 2.1.2 -------------- -(released Feb 29, 2016) - -- Fixed Python 3 regression in image formatter (#1215) -- Fixed regression in Bash lexer (PR#562) - - -Version 2.1.1 -------------- -(relased Feb 14, 2016) - -- Fixed Jython compatibility (#1205) -- Fixed HTML formatter output with leading empty lines (#1111) -- Added a mapping table for LaTeX encodings and added utf8 (#1152) -- Fixed image formatter font searching on Macs (#1188) -- Fixed deepcopy-ing of Token instances (#1168) -- Fixed Julia string interpolation (#1170) -- Fixed statefulness of HttpLexer between get_tokens calls -- Many smaller fixes to various lexers - - -Version 2.1 ------------ -(released Jan 17, 2016) - -- Added lexers: - - * Emacs Lisp (PR#431) - * Arduino (PR#442) - * Modula-2 with multi-dialect support (#1090) - * Fortran fixed format (PR#213) - * Archetype Definition language (PR#483) - * Terraform (PR#432) - * Jcl, Easytrieve (PR#208) - * ParaSail (PR#381) - * Boogie (PR#420) - * Turtle (PR#425) - * Fish Shell (PR#422) - * Roboconf (PR#449) - * Test Anything Protocol (PR#428) - * Shen (PR#385) - * Component Pascal (PR#437) - * SuperCollider (PR#472) - * Shell consoles (Tcsh, PowerShell, MSDOS) (PR#479) - * Elm and J (PR#452) - * Crmsh (PR#440) - * Praat (PR#492) - * CSound (PR#494) - * Ezhil (PR#443) - * Thrift (PR#469) - * QVT Operational (PR#204) - * Hexdump (PR#508) - * CAmkES Configuration (PR#462) - -- Added styles: - - * Lovelace (PR#456) - * Algol and Algol-nu (#1090) - -- Added formatters: - - * IRC (PR#458) - * True color (24-bit) terminal ANSI sequences (#1142) - (formatter alias: "16m") - -- New "filename" option for HTML formatter (PR#527). - -- Improved performance of the HTML formatter for long lines (PR#504). - -- Updated autopygmentize script (PR#445). - -- Fixed style inheritance for non-standard token types in HTML output. - -- Added support for async/await to Python 3 lexer. - -- Rewrote linenos option for TerminalFormatter (it's better, but slightly - different output than before) (#1147). - -- Javascript lexer now supports most of ES6 (#1100). - -- Cocoa builtins updated for iOS 8.1 (PR#433). - -- Combined BashSessionLexer and ShellSessionLexer, new version should support - the prompt styles of either. - -- Added option to pygmentize to show a full traceback on exceptions. - -- Fixed incomplete output on Windows and Python 3 (e.g. when using iPython - Notebook) (#1153). - -- Allowed more traceback styles in Python console lexer (PR#253). - -- Added decorators to TypeScript (PR#509). 
- -- Fix highlighting of certain IRC logs formats (#1076). - - -Version 2.0.2 -------------- -(released Jan 20, 2015) - -- Fix Python tracebacks getting duplicated in the console lexer (#1068). - -- Backquote-delimited identifiers are now recognized in F# (#1062). - - -Version 2.0.1 -------------- -(released Nov 10, 2014) - -- Fix an encoding issue when using ``pygmentize`` with the ``-o`` option. - - -Version 2.0 ------------ -(released Nov 9, 2014) - -- Default lexer encoding is now "guess", i.e. UTF-8 / Locale / Latin1 is - tried in that order. - -- Major update to Swift lexer (PR#410). - -- Multiple fixes to lexer guessing in conflicting cases: - - * recognize HTML5 by doctype - * recognize XML by XML declaration - * don't recognize C/C++ as SystemVerilog - -- Simplified regexes and builtin lists. - - -Version 2.0rc1 --------------- -(released Oct 16, 2014) - -- Dropped Python 2.4 and 2.5 compatibility. This is in favor of single-source - compatibility between Python 2.6, 2.7 and 3.3+. - -- New website and documentation based on Sphinx (finally!) - -- Lexers added: - - * APL (#969) - * Agda and Literate Agda (PR#203) - * Alloy (PR#355) - * AmbientTalk - * BlitzBasic (PR#197) - * ChaiScript (PR#24) - * Chapel (PR#256) - * Cirru (PR#275) - * Clay (PR#184) - * ColdFusion CFC (PR#283) - * Cryptol and Literate Cryptol (PR#344) - * Cypher (PR#257) - * Docker config files - * EBNF (PR#193) - * Eiffel (PR#273) - * GAP (PR#311) - * Golo (PR#309) - * Handlebars (PR#186) - * Hy (PR#238) - * Idris and Literate Idris (PR#210) - * Igor Pro (PR#172) - * Inform 6/7 (PR#281) - * Intel objdump (PR#279) - * Isabelle (PR#386) - * Jasmin (PR#349) - * JSON-LD (PR#289) - * Kal (PR#233) - * Lean (PR#399) - * LSL (PR#296) - * Limbo (PR#291) - * Liquid (#977) - * MQL (PR#285) - * MaskJS (PR#280) - * Mozilla preprocessors - * Mathematica (PR#245) - * NesC (PR#166) - * Nit (PR#375) - * Nix (PR#267) - * Pan - * Pawn (PR#211) - * Perl 6 (PR#181) - * Pig (PR#304) - * Pike (PR#237) - * QBasic (PR#182) - * Red (PR#341) - * ResourceBundle (#1038) - * Rexx (PR#199) - * Rql (PR#251) - * Rsl - * SPARQL (PR#78) - * Slim (PR#366) - * Swift (PR#371) - * Swig (PR#168) - * TADS 3 (PR#407) - * Todo.txt todo lists - * Twig (PR#404) - -- Added a helper to "optimize" regular expressions that match one of many - literal words; this can save 20% and more lexing time with lexers that - highlight many keywords or builtins. - -- New styles: "xcode" and "igor", similar to the default highlighting of - the respective IDEs. - -- The command-line "pygmentize" tool now tries a little harder to find the - correct encoding for files and the terminal (#979). - -- Added "inencoding" option for lexers to override "encoding" analogous - to "outencoding" (#800). - -- Added line-by-line "streaming" mode for pygmentize with the "-s" option. - (PR#165) Only fully works for lexers that have no constructs spanning - lines! - -- Added an "envname" option to the LaTeX formatter to select a replacement - verbatim environment (PR#235). - -- Updated the Makefile lexer to yield a little more useful highlighting. - -- Lexer aliases passed to ``get_lexer_by_name()`` are now case-insensitive. - -- File name matching in lexers and formatters will now use a regex cache - for speed (PR#205). - -- Pygments will now recognize "vim" modelines when guessing the lexer for - a file based on content (PR#118). - -- Major restructure of the ``pygments.lexers`` module namespace. There are now - many more modules with less lexers per module. 
Old modules are still around - and re-export the lexers they previously contained. - -- The NameHighlightFilter now works with any Name.* token type (#790). - -- Python 3 lexer: add new exceptions from PEP 3151. - -- Opa lexer: add new keywords (PR#170). - -- Julia lexer: add keywords and underscore-separated number - literals (PR#176). - -- Lasso lexer: fix method highlighting, update builtins. Fix - guessing so that plain XML isn't always taken as Lasso (PR#163). - -- Objective C/C++ lexers: allow "@" prefixing any expression (#871). - -- Ruby lexer: fix lexing of Name::Space tokens (#860) and of symbols - in hashes (#873). - -- Stan lexer: update for version 2.4.0 of the language (PR#162, PR#255, PR#377). - -- JavaScript lexer: add the "yield" keyword (PR#196). - -- HTTP lexer: support for PATCH method (PR#190). - -- Koka lexer: update to newest language spec (PR#201). - -- Haxe lexer: rewrite and support for Haxe 3 (PR#174). - -- Prolog lexer: add different kinds of numeric literals (#864). - -- F# lexer: rewrite with newest spec for F# 3.0 (#842), fix a bug with - dotted chains (#948). - -- Kotlin lexer: general update (PR#271). - -- Rebol lexer: fix comment detection and analyse_text (PR#261). - -- LLVM lexer: update keywords to v3.4 (PR#258). - -- PHP lexer: add new keywords and binary literals (PR#222). - -- external/markdown-processor.py updated to newest python-markdown (PR#221). - -- CSS lexer: some highlighting order fixes (PR#231). - -- Ceylon lexer: fix parsing of nested multiline comments (#915). - -- C family lexers: fix parsing of indented preprocessor directives (#944). - -- Rust lexer: update to 0.9 language version (PR#270, PR#388). - -- Elixir lexer: update to 0.15 language version (PR#392). - -- Fix swallowing incomplete tracebacks in Python console lexer (#874). - - -Version 1.6 ------------ -(released Feb 3, 2013) - -- Lexers added: - - * Dylan console (PR#149) - * Logos (PR#150) - * Shell sessions (PR#158) - -- Fix guessed lexers not receiving lexer options (#838). - -- Fix unquoted HTML attribute lexing in Opa (#841). - -- Fixes to the Dart lexer (PR#160). - - -Version 1.6rc1 --------------- -(released Jan 9, 2013) - -- Lexers added: - - * AspectJ (PR#90) - * AutoIt (PR#122) - * BUGS-like languages (PR#89) - * Ceylon (PR#86) - * Croc (new name for MiniD) - * CUDA (PR#75) - * Dg (PR#116) - * IDL (PR#115) - * Jags (PR#89) - * Julia (PR#61) - * Kconfig (#711) - * Lasso (PR#95, PR#113) - * LiveScript (PR#84) - * Monkey (PR#117) - * Mscgen (PR#80) - * NSIS scripts (PR#136) - * OpenCOBOL (PR#72) - * QML (PR#123) - * Puppet (PR#133) - * Racket (PR#94) - * Rdoc (PR#99) - * Robot Framework (PR#137) - * RPM spec files (PR#124) - * Rust (PR#67) - * Smali (Dalvik assembly) - * SourcePawn (PR#39) - * Stan (PR#89) - * Treetop (PR#125) - * TypeScript (PR#114) - * VGL (PR#12) - * Visual FoxPro (#762) - * Windows Registry (#819) - * Xtend (PR#68) - -- The HTML formatter now supports linking to tags using CTags files, when the - python-ctags package is installed (PR#87). - -- The HTML formatter now has a "linespans" option that wraps every line in a - tag with a specific id (PR#82). - -- When deriving a lexer from another lexer with token definitions, definitions - for states not in the child lexer are now inherited. If you override a state - in the child lexer, an "inherit" keyword has been added to insert the base - state at that position (PR#141). - -- The C family lexers now inherit token definitions from a common base class, - removing code duplication (PR#141). 
- -- Use "colorama" on Windows for console color output (PR#142). - -- Fix Template Haskell highlighting (PR#63). - -- Fix some S/R lexer errors (PR#91). - -- Fix a bug in the Prolog lexer with names that start with 'is' (#810). - -- Rewrite Dylan lexer, add Dylan LID lexer (PR#147). - -- Add a Java quickstart document (PR#146). - -- Add a "external/autopygmentize" file that can be used as .lessfilter (#802). - - -Version 1.5 ------------ -(codename Zeitdilatation, released Mar 10, 2012) - -- Lexers added: - - * Awk (#630) - * Fancy (#633) - * PyPy Log - * eC - * Nimrod - * Nemerle (#667) - * F# (#353) - * Groovy (#501) - * PostgreSQL (#660) - * DTD - * Gosu (#634) - * Octave (PR#22) - * Standard ML (PR#14) - * CFengine3 (#601) - * Opa (PR#37) - * HTTP sessions (PR#42) - * JSON (PR#31) - * SNOBOL (PR#30) - * MoonScript (PR#43) - * ECL (PR#29) - * Urbiscript (PR#17) - * OpenEdge ABL (PR#27) - * SystemVerilog (PR#35) - * Coq (#734) - * PowerShell (#654) - * Dart (#715) - * Fantom (PR#36) - * Bro (PR#5) - * NewLISP (PR#26) - * VHDL (PR#45) - * Scilab (#740) - * Elixir (PR#57) - * Tea (PR#56) - * Kotlin (PR#58) - -- Fix Python 3 terminal highlighting with pygmentize (#691). - -- In the LaTeX formatter, escape special &, < and > chars (#648). - -- In the LaTeX formatter, fix display problems for styles with token - background colors (#670). - -- Enhancements to the Squid conf lexer (#664). - -- Several fixes to the reStructuredText lexer (#636). - -- Recognize methods in the ObjC lexer (#638). - -- Fix Lua "class" highlighting: it does not have classes (#665). - -- Fix degenerate regex in Scala lexer (#671) and highlighting bugs (#713, 708). - -- Fix number pattern order in Ocaml lexer (#647). - -- Fix generic type highlighting in ActionScript 3 (#666). - -- Fixes to the Clojure lexer (PR#9). - -- Fix degenerate regex in Nemerle lexer (#706). - -- Fix infinite looping in CoffeeScript lexer (#729). - -- Fix crashes and analysis with ObjectiveC lexer (#693, #696). - -- Add some Fortran 2003 keywords. - -- Fix Boo string regexes (#679). - -- Add "rrt" style (#727). - -- Fix infinite looping in Darcs Patch lexer. - -- Lots of misc fixes to character-eating bugs and ordering problems in many - different lexers. - - -Version 1.4 ------------ -(codename Unschärfe, released Jan 03, 2011) - -- Lexers added: - - * Factor (#520) - * PostScript (#486) - * Verilog (#491) - * BlitzMax Basic (#478) - * Ioke (#465) - * Java properties, split out of the INI lexer (#445) - * Scss (#509) - * Duel/JBST - * XQuery (#617) - * Mason (#615) - * GoodData (#609) - * SSP (#473) - * Autohotkey (#417) - * Google Protocol Buffers - * Hybris (#506) - -- Do not fail in analyse_text methods (#618). - -- Performance improvements in the HTML formatter (#523). - -- With the ``noclasses`` option in the HTML formatter, some styles - present in the stylesheet were not added as inline styles. - -- Four fixes to the Lua lexer (#480, #481, #482, #497). - -- More context-sensitive Gherkin lexer with support for more i18n translations. - -- Support new OO keywords in Matlab lexer (#521). - -- Small fix in the CoffeeScript lexer (#519). - -- A bugfix for backslashes in ocaml strings (#499). - -- Fix unicode/raw docstrings in the Python lexer (#489). - -- Allow PIL to work without PIL.pth (#502). - -- Allow seconds as a unit in CSS (#496). - -- Support ``application/javascript`` as a JavaScript mime type (#504). - -- Support `Offload `_ C++ Extensions as - keywords in the C++ lexer (#484). 
- -- Escape more characters in LaTeX output (#505). - -- Update Haml/Sass lexers to version 3 (#509). - -- Small PHP lexer string escaping fix (#515). - -- Support comments before preprocessor directives, and unsigned/ - long long literals in C/C++ (#613, #616). - -- Support line continuations in the INI lexer (#494). - -- Fix lexing of Dylan string and char literals (#628). - -- Fix class/procedure name highlighting in VB.NET lexer (#624). - - -Version 1.3.1 -------------- -(bugfix release, released Mar 05, 2010) - -- The ``pygmentize`` script was missing from the distribution. - - -Version 1.3 ------------ -(codename Schneeglöckchen, released Mar 01, 2010) - -- Added the ``ensurenl`` lexer option, which can be used to suppress the - automatic addition of a newline to the lexer input. - -- Lexers added: - - * Ada - * Coldfusion - * Modula-2 - * Haxe - * R console - * Objective-J - * Haml and Sass - * CoffeeScript - -- Enhanced reStructuredText highlighting. - -- Added support for PHP 5.3 namespaces in the PHP lexer. - -- Added a bash completion script for `pygmentize`, to the external/ - directory (#466). - -- Fixed a bug in `do_insertions()` used for multi-lexer languages. - -- Fixed a Ruby regex highlighting bug (#476). - -- Fixed regex highlighting bugs in Perl lexer (#258). - -- Add small enhancements to the C lexer (#467) and Bash lexer (#469). - -- Small fixes for the Tcl, Debian control file, Nginx config, - Smalltalk, Objective-C, Clojure, Lua lexers. - -- Gherkin lexer: Fixed single apostrophe bug and added new i18n keywords. - - -Version 1.2.2 -------------- -(bugfix release, released Jan 02, 2010) - -* Removed a backwards incompatibility in the LaTeX formatter that caused - Sphinx to produce invalid commands when writing LaTeX output (#463). - -* Fixed a forever-backtracking regex in the BashLexer (#462). - - -Version 1.2.1 -------------- -(bugfix release, released Jan 02, 2010) - -* Fixed mishandling of an ellipsis in place of the frames in a Python - console traceback, resulting in clobbered output. - - -Version 1.2 ------------ -(codename Neujahr, released Jan 01, 2010) - -- Dropped Python 2.3 compatibility. - -- Lexers added: - - * Asymptote - * Go - * Gherkin (Cucumber) - * CMake - * Ooc - * Coldfusion - * Haxe - * R console - -- Added options for rendering LaTeX in source code comments in the - LaTeX formatter (#461). - -- Updated the Logtalk lexer. - -- Added `line_number_start` option to image formatter (#456). - -- Added `hl_lines` and `hl_color` options to image formatter (#457). - -- Fixed the HtmlFormatter's handling of noclasses=True to not output any - classes (#427). - -- Added the Monokai style (#453). - -- Fixed LLVM lexer identifier syntax and added new keywords (#442). - -- Fixed the PythonTracebackLexer to handle non-traceback data in header or - trailer, and support more partial tracebacks that start on line 2 (#437). - -- Fixed the CLexer to not highlight ternary statements as labels. - -- Fixed lexing of some Ruby quoting peculiarities (#460). - -- A few ASM lexer fixes (#450). - - -Version 1.1.1 -------------- -(bugfix release, released Sep 15, 2009) - -- Fixed the BBCode lexer (#435). - -- Added support for new Jinja2 keywords. - -- Fixed test suite failures. - -- Added Gentoo-specific suffixes to Bash lexer. - - -Version 1.1 ------------ -(codename Brillouin, released Sep 11, 2009) - -- Ported Pygments to Python 3. This needed a few changes in the way - encodings are handled; they may affect corner cases when used with - Python 2 as well. 
- -- Lexers added: - - * Antlr/Ragel, thanks to Ana Nelson - * (Ba)sh shell - * Erlang shell - * GLSL - * Prolog - * Evoque - * Modelica - * Rebol - * MXML - * Cython - * ABAP - * ASP.net (VB/C#) - * Vala - * Newspeak - -- Fixed the LaTeX formatter's output so that output generated for one style - can be used with the style definitions of another (#384). - -- Added "anchorlinenos" and "noclobber_cssfile" (#396) options to HTML - formatter. - -- Support multiline strings in Lua lexer. - -- Rewrite of the JavaScript lexer by Pumbaa80 to better support regular - expression literals (#403). - -- When pygmentize is asked to highlight a file for which multiple lexers - match the filename, use the analyse_text guessing engine to determine the - winner (#355). - -- Fixed minor bugs in the JavaScript lexer (#383), the Matlab lexer (#378), - the Scala lexer (#392), the INI lexer (#391), the Clojure lexer (#387) - and the AS3 lexer (#389). - -- Fixed three Perl heredoc lexing bugs (#379, #400, #422). - -- Fixed a bug in the image formatter which misdetected lines (#380). - -- Fixed bugs lexing extended Ruby strings and regexes. - -- Fixed a bug when lexing git diffs. - -- Fixed a bug lexing the empty commit in the PHP lexer (#405). - -- Fixed a bug causing Python numbers to be mishighlighted as floats (#397). - -- Fixed a bug when backslashes are used in odd locations in Python (#395). - -- Fixed various bugs in Matlab and S-Plus lexers, thanks to Winston Chang (#410, - #411, #413, #414) and fmarc (#419). - -- Fixed a bug in Haskell single-line comment detection (#426). - -- Added new-style reStructuredText directive for docutils 0.5+ (#428). - - -Version 1.0 ------------ -(codename Dreiundzwanzig, released Nov 23, 2008) - -- Don't use join(splitlines()) when converting newlines to ``\n``, - because that doesn't keep all newlines at the end when the - ``stripnl`` lexer option is False. - -- Added ``-N`` option to command-line interface to get a lexer name - for a given filename. - -- Added Tango style, written by Andre Roberge for the Crunchy project. - -- Added Python3TracebackLexer and ``python3`` option to - PythonConsoleLexer. - -- Fixed a few bugs in the Haskell lexer. - -- Fixed PythonTracebackLexer to be able to recognize SyntaxError and - KeyboardInterrupt (#360). - -- Provide one formatter class per image format, so that surprises like:: - - pygmentize -f gif -o foo.gif foo.py - - creating a PNG file are avoided. - -- Actually use the `font_size` option of the image formatter. - -- Fixed numpy lexer that it doesn't listen for `*.py` any longer. - -- Fixed HTML formatter so that text options can be Unicode - strings (#371). - -- Unified Diff lexer supports the "udiff" alias now. - -- Fixed a few issues in Scala lexer (#367). - -- RubyConsoleLexer now supports simple prompt mode (#363). - -- JavascriptLexer is smarter about what constitutes a regex (#356). - -- Add Applescript lexer, thanks to Andreas Amann (#330). - -- Make the codetags more strict about matching words (#368). - -- NginxConfLexer is a little more accurate on mimetypes and - variables (#370). - - -Version 0.11.1 --------------- -(released Aug 24, 2008) - -- Fixed a Jython compatibility issue in pygments.unistring (#358). - - -Version 0.11 ------------- -(codename Straußenei, released Aug 23, 2008) - -Many thanks go to Tim Hatch for writing or integrating most of the bug -fixes and new features. 
- -- Lexers added: - - * Nasm-style assembly language, thanks to delroth - * YAML, thanks to Kirill Simonov - * ActionScript 3, thanks to Pierre Bourdon - * Cheetah/Spitfire templates, thanks to Matt Good - * Lighttpd config files - * Nginx config files - * Gnuplot plotting scripts - * Clojure - * POV-Ray scene files - * Sqlite3 interactive console sessions - * Scala source files, thanks to Krzysiek Goj - -- Lexers improved: - - * C lexer highlights standard library functions now and supports C99 - types. - * Bash lexer now correctly highlights heredocs without preceding - whitespace. - * Vim lexer now highlights hex colors properly and knows a couple - more keywords. - * Irc logs lexer now handles xchat's default time format (#340) and - correctly highlights lines ending in ``>``. - * Support more delimiters for perl regular expressions (#258). - * ObjectiveC lexer now supports 2.0 features. - -- Added "Visual Studio" style. - -- Updated markdown processor to Markdown 1.7. - -- Support roman/sans/mono style defs and use them in the LaTeX - formatter. - -- The RawTokenFormatter is no longer registered to ``*.raw`` and it's - documented that tokenization with this lexer may raise exceptions. - -- New option ``hl_lines`` to HTML formatter, to highlight certain - lines. - -- New option ``prestyles`` to HTML formatter. - -- New option *-g* to pygmentize, to allow lexer guessing based on - filetext (can be slowish, so file extensions are still checked - first). - -- ``guess_lexer()`` now makes its decision much faster due to a cache - of whether data is xml-like (a check which is used in several - versions of ``analyse_text()``. Several lexers also have more - accurate ``analyse_text()`` now. - - -Version 0.10 ------------- -(codename Malzeug, released May 06, 2008) - -- Lexers added: - - * Io - * Smalltalk - * Darcs patches - * Tcl - * Matlab - * Matlab sessions - * FORTRAN - * XSLT - * tcsh - * NumPy - * Python 3 - * S, S-plus, R statistics languages - * Logtalk - -- In the LatexFormatter, the *commandprefix* option is now by default - 'PY' instead of 'C', since the latter resulted in several collisions - with other packages. Also, the special meaning of the *arg* - argument to ``get_style_defs()`` was removed. - -- Added ImageFormatter, to format code as PNG, JPG, GIF or BMP. - (Needs the Python Imaging Library.) - -- Support doc comments in the PHP lexer. - -- Handle format specifications in the Perl lexer. - -- Fix comment handling in the Batch lexer. - -- Add more file name extensions for the C++, INI and XML lexers. - -- Fixes in the IRC and MuPad lexers. - -- Fix function and interface name highlighting in the Java lexer. - -- Fix at-rule handling in the CSS lexer. - -- Handle KeyboardInterrupts gracefully in pygmentize. - -- Added BlackWhiteStyle. - -- Bash lexer now correctly highlights math, does not require - whitespace after semicolons, and correctly highlights boolean - operators. - -- Makefile lexer is now capable of handling BSD and GNU make syntax. - - -Version 0.9 ------------ -(codename Herbstzeitlose, released Oct 14, 2007) - -- Lexers added: - - * Erlang - * ActionScript - * Literate Haskell - * Common Lisp - * Various assembly languages - * Gettext catalogs - * Squid configuration - * Debian control files - * MySQL-style SQL - * MOOCode - -- Lexers improved: - - * Greatly improved the Haskell and OCaml lexers. - * Improved the Bash lexer's handling of nested constructs. 
- * The C# and Java lexers exhibited abysmal performance with some - input code; this should now be fixed. - * The IRC logs lexer is now able to colorize weechat logs too. - * The Lua lexer now recognizes multi-line comments. - * Fixed bugs in the D and MiniD lexer. - -- The encoding handling of the command line mode (pygmentize) was - enhanced. You shouldn't get UnicodeErrors from it anymore if you - don't give an encoding option. - -- Added a ``-P`` option to the command line mode which can be used to - give options whose values contain commas or equals signs. - -- Added 256-color terminal formatter. - -- Added an experimental SVG formatter. - -- Added the ``lineanchors`` option to the HTML formatter, thanks to - Ian Charnas for the idea. - -- Gave the line numbers table a CSS class in the HTML formatter. - -- Added a Vim 7-like style. - - -Version 0.8.1 -------------- -(released Jun 27, 2007) - -- Fixed POD highlighting in the Ruby lexer. - -- Fixed Unicode class and namespace name highlighting in the C# lexer. - -- Fixed Unicode string prefix highlighting in the Python lexer. - -- Fixed a bug in the D and MiniD lexers. - -- Fixed the included MoinMoin parser. - - -Version 0.8 ------------ -(codename Maikäfer, released May 30, 2007) - -- Lexers added: - - * Haskell, thanks to Adam Blinkinsop - * Redcode, thanks to Adam Blinkinsop - * D, thanks to Kirk McDonald - * MuPad, thanks to Christopher Creutzig - * MiniD, thanks to Jarrett Billingsley - * Vim Script, by Tim Hatch - -- The HTML formatter now has a second line-numbers mode in which it - will just integrate the numbers in the same ``
<pre>`` tag as the
-  code.
-
-- The `CSharpLexer` now is Unicode-aware, which means that it has an
-  option that can be set so that it correctly lexes Unicode
-  identifiers allowed by the C# specs.
-
-- Added a `RaiseOnErrorTokenFilter` that raises an exception when the
-  lexer generates an error token, and a `VisibleWhitespaceFilter` that
-  converts whitespace (spaces, tabs, newlines) into visible
-  characters.
-
-- Fixed the `do_insertions()` helper function to yield correct
-  indices.
-
-- The ReST lexer now automatically highlights source code blocks in
-  ".. sourcecode:: language" and ".. code:: language" directive
-  blocks.
-
-- Improved the default style (thanks to Tiberius Teng). The old
-  default is still available as the "emacs" style (which was an alias
-  before).
-
-- The `get_style_defs` method of HTML formatters now uses the
-  `cssclass` option as the default selector if it was given.
-
-- Improved the ReST and Bash lexers a bit.
-
-- Fixed a few bugs in the Makefile and Bash lexers, thanks to Tim
-  Hatch.
-
-- Fixed a bug in the command line code that disallowed ``-O`` options
-  when using the ``-S`` option.
-
-- Fixed a bug in the `RawTokenFormatter`.
-
-
-Version 0.7.1
--------------
-(released Feb 15, 2007)
-
-- Fixed little highlighting bugs in the Python, Java, Scheme and
-  Apache Config lexers.
-
-- Updated the included manpage.
-
-- Included a built version of the documentation in the source tarball.
-
-
-Version 0.7
------------
-(codename Faschingskrapfn, released Feb 14, 2007)
-
-- Added a MoinMoin parser that uses Pygments. With it, you get
-  Pygments highlighting in Moin Wiki pages.
-
-- Changed the exception raised if no suitable lexer, formatter etc. is
-  found in one of the `get_*_by_*` functions to a custom exception,
-  `pygments.util.ClassNotFound`. It is, however, a subclass of
-  `ValueError` in order to retain backwards compatibility.
-
-- Added a `-H` command line option which can be used to get the
-  docstring of a lexer, formatter or filter.
-
-- Made the handling of lexers and formatters more consistent. The
-  aliases and filename patterns of formatters are now attributes on
-  them.
-
-- Added an OCaml lexer, thanks to Adam Blinkinsop.
-
-- Made the HTML formatter more flexible, and easily subclassable in
-  order to make it easy to implement custom wrappers, e.g. alternate
-  line number markup. See the documentation.
-
-- Added an `outencoding` option to all formatters, making it possible
-  to override the `encoding` (which is used by lexers and formatters)
-  when using the command line interface. Also, if using the terminal
-  formatter and the output file is a terminal and has an encoding
-  attribute, use it if no encoding is given.
-
-- Made it possible to just drop style modules into the `styles`
-  subpackage of the Pygments installation.
-
-- Added a "state" keyword argument to the `using` helper.
-
-- Added a `commandprefix` option to the `LatexFormatter` which allows
-  control over how the command names are constructed.
-
-- Added quite a few new lexers, thanks to Tim Hatch:
-
-  * Java Server Pages
-  * Windows batch files
-  * Trac Wiki markup
-  * Python tracebacks
-  * ReStructuredText
-  * Dylan
-  * and the Befunge esoteric programming language (yay!)
-
-- Added Mako lexers by Ben Bangert.
-
-- Added the "fruity" style, another dark-background theme originally based
-  on Vim.
-
-- Added sources.list lexer by Dennis Kaarsemaker.
-
-- Added token stream filters, and a pygmentize option to use them.
-
-- Changed the behavior of the `in` operator for tokens.
-
-- Added mimetypes for all lexers.
-
-- Fixed some problems lexing Python strings.
-
-- Fixed tickets: #167, #178, #179, #180, #185, #201.
-
-
-Version 0.6
------------
-(codename Zimtstern, released Dec 20, 2006)
-
-- Added option for the HTML formatter to write the CSS to an external
-  file in "full document" mode.
-
-- Added RTF formatter.
-
-- Added Bash and Apache configuration lexers (thanks to Tim Hatch).
-
-- Improved guessing methods for various lexers.
-
-- Added `@media` support to CSS lexer (thanks to Tim Hatch).
-
-- Added a Groff lexer (thanks to Tim Hatch).
-
-- License change to BSD.
-
-- Added lexers for the Myghty template language.
-
-- Added a Scheme lexer (thanks to Marek Kubica).
-
-- Added some functions to iterate over existing lexers, formatters and
-  filters.
-
-- The HtmlFormatter's `get_style_defs()` can now take a list as an
-  argument to generate CSS with multiple prefixes.
-
-- Support for guessing input encoding added.
-
-- Encoding support added: all processing is now done with Unicode
-  strings, input and output are converted from and optionally to byte
-  strings (see the ``encoding`` option of lexers and formatters).
-
-- Some improvements in the C(++) lexers handling comments and line
-  continuations.
-
-
-Version 0.5.1
--------------
-(released Oct 30, 2006)
-
-- Fixed traceback in ``pygmentize -L`` (thanks to Piotr Ozarowski).
-
-
-Version 0.5
------------
-(codename PyKleur, released Oct 30, 2006)
-
-- Initial public release.
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 13d1c74b..00000000
--- a/LICENSE
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2006-2019 by the respective authors (see AUTHORS file).
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-* Redistributions of source code must retain the above copyright
-  notice, this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright
-  notice, this list of conditions and the following disclaimer in the
-  documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 7e1d320d..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include pygmentize
-include external/*
-include Makefile CHANGES LICENSE AUTHORS TODO
-recursive-include tests *
-recursive-include doc *
-recursive-include scripts *
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 2fcb832f..00000000
--- a/Makefile
+++ /dev/null
@@ -1,71 +0,0 @@
-#
-# Makefile for Pygments
-# ~~~~~~~~~~~~~~~~~~~~~
-#
-# Combines scripts for common tasks.
-#
-# :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
-# :license: BSD, see LICENSE for details.
-#
-
-PYTHON ?= python
-
-export PYTHONPATH = $(shell echo "$$PYTHONPATH"):$(shell python -c 'import os; print ":".join(os.path.abspath(line.strip()) for line in file("PYTHONPATH"))' 2>/dev/null)
-
-.PHONY: all check clean clean-pyc codetags docs mapfiles \
-	pylint reindent test test-coverage
-
-all: clean-pyc check test
-
-check:
-	@$(PYTHON) scripts/detect_missing_analyse_text.py || true
-	@pyflakes pygments | grep -v 'but unused' || true
-	@$(PYTHON) scripts/check_sources.py -i build -i dist -i pygments/lexers/_mapping.py \
-		   -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py
-
-clean: clean-pyc
-	-rm -rf build
-	-rm -f codetags.html
-
-clean-pyc:
-	find . -name '*.pyc' -exec rm -f {} +
-	find . -name '*.pyo' -exec rm -f {} +
-	find . -name '*~' -exec rm -f {} +
-
-codetags:
-	@$(PYTHON) scripts/find_codetags.py -i tests/examplefiles -i scripts/pylintrc \
-		   -i scripts/find_codetags.py -o codetags.html .
-
-docs:
-	make -C doc html
-
-mapfiles:
-	(cd pygments/formatters; $(PYTHON) _mapping.py)
-	(cd pygments/lexers; $(PYTHON) _mapping.py)
-
-pylint:
-	@pylint --rcfile scripts/pylintrc pygments
-
-reindent:
-	@$(PYTHON) scripts/reindent.py -r -B .
-
-test:
-	@$(PYTHON) tests/run.py -d $(TEST)
-
-test-coverage:
-	@$(PYTHON) tests/run.py -d --with-coverage --cover-package=pygments --cover-erase $(TEST)
-
-test-examplefiles:
-	nosetests tests/test_examplefiles.py
-
-tox-test:
-	@tox -- $(TEST)
-
-tox-test-coverage:
-	@tox -- --with-coverage --cover-package=pygments --cover-erase $(TEST)
-
-RLMODULES = pygments.lexers
-
-regexlint:
-	@if [ -z "$(REGEXLINT)" ]; then echo "Please set REGEXLINT=checkout path"; exit 1; fi
-	PYTHONPATH=`pwd`:$(REGEXLINT) $(REGEXLINT)/regexlint/cmdline.py $(RLMODULES)
diff --git a/README b/README
new file mode 100644
index 00000000..1583c6b0
--- /dev/null
+++ b/README
@@ -0,0 +1,3 @@
+This repository has been moved to GitHub: https://github.com/pygments/pygments
+
+Please do not file new PRs and issues here.
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 350e242e..00000000
--- a/README.rst
+++ /dev/null
@@ -1,39 +0,0 @@
-README for Pygments
-===================
-
-This is the source of Pygments.  It is a generic syntax highlighter that
-supports over 300 languages and text formats, for use in code hosting, forums,
-wikis or other applications that need to prettify source code.
-
-Installing
-----------
-
-... works as usual, use ``python setup.py install``.
-
-Documentation
--------------
-
-... can be found online at http://pygments.org/ or created by ::
-
-   cd doc
-   make html
-
-Development
------------
-
-... takes place on `Bitbucket
-<https://bitbucket.org/birkenfeld/pygments-main>`_, where the Mercurial
-repository, tickets and pull requests can be viewed.
-
-Continuous testing runs on drone.io:
-
-.. image:: https://drone.io/bitbucket.org/birkenfeld/pygments-main/status.png
-   :target: https://drone.io/bitbucket.org/birkenfeld/pygments-main
-
-The authors
------------
-
-Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*.
-
-Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
-the `Pocoo `_ team and **Tim Hatch**.
diff --git a/TODO b/TODO
deleted file mode 100644
index 88076f3d..00000000
--- a/TODO
+++ /dev/null
@@ -1,12 +0,0 @@
-Todo
-====
-
-- lexers that need work:
-  * review perl lexer (numerous bugs, but so far no one had complaints ;)
-  * readd property support for C# lexer? that is, find a regex that doesn't
-    backtrack to death...
-  * add support for function name highlighting to C++ lexer
-
-- allow "overlay" token types to highlight specials: nth line, a word etc.
-  (see the filter sketch after this list)
-
-- pygmentize option presets, more sophisticated method to output styles?
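The "overlay" item above is close to what the filter API (described in the ``filterdevelopment`` docs removed later in this patch) can already approximate, similar in spirit to the built-in ``NameHighlightFilter``. The following is only a sketch: the class name and its ``words``/``tokentype`` options are invented here and are not part of Pygments.

.. sourcecode:: python

    from pygments.filter import Filter
    from pygments.token import Name

    class WordOverlayFilter(Filter):
        """Re-tag selected words so the style sheet can single them out."""

        def __init__(self, **options):
            Filter.__init__(self, **options)
            self.words = set(options.get('words', ()))
            self.tokentype = options.get('tokentype', Name.Decorator)

        def filter(self, lexer, stream):
            for ttype, value in stream:
                if value in self.words:
                    yield self.tokentype, value
                else:
                    yield ttype, value

    # Usage: lexer.add_filter(WordOverlayFilter(words={'TODO', 'FIXME'}))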
diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml
deleted file mode 100644
index 4a9f1b6d..00000000
--- a/bitbucket-pipelines.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-pipelines:
-  default:
-    - step:
-        name: Test on Python 2.7
-        image: python:2.7
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py27
-    - step:
-        name: Test on Python 3.5
-        image: python:3.5
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py35
-    - step:
-        name: Test on Python 3.6
-        image: python:3.6
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py36
-    - step:
-        name: Test on Python 3.7
-        image: python:3.7
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py37
diff --git a/doc/Makefile b/doc/Makefile
deleted file mode 100644
index 7fb75411..00000000
--- a/doc/Makefile
+++ /dev/null
@@ -1,153 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = PYTHONPATH=.. sphinx-build
-PAPER         =
-BUILDDIR      = _build
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-
-clean:
-	-rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Pygments.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Pygments.qhc"
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/Pygments"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Pygments"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/doc/_static/favicon.ico b/doc/_static/favicon.ico
deleted file mode 100644
index 777f617d..00000000
Binary files a/doc/_static/favicon.ico and /dev/null differ
diff --git a/doc/_static/logo_new.png b/doc/_static/logo_new.png
deleted file mode 100644
index 0ae4b209..00000000
Binary files a/doc/_static/logo_new.png and /dev/null differ
diff --git a/doc/_static/logo_only.png b/doc/_static/logo_only.png
deleted file mode 100644
index fdebcc47..00000000
Binary files a/doc/_static/logo_only.png and /dev/null differ
diff --git a/doc/_templates/docssidebar.html b/doc/_templates/docssidebar.html
deleted file mode 100644
index 913acaaf..00000000
--- a/doc/_templates/docssidebar.html
+++ /dev/null
@@ -1,3 +0,0 @@
-{% if pagename != 'docs/index' %}
-« Back to docs index
-{% endif %}
diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html
deleted file mode 100644
index 29954554..00000000
--- a/doc/_templates/indexsidebar.html
+++ /dev/null
@@ -1,25 +0,0 @@
-Download
-
-{% if version.endswith('(hg)') %}
-This documentation is for version {{ version }}, which is
-  not released yet.
-
-You can use it from the Mercurial repo or look for
-  released versions in the Python Package Index.
-
-{% else %}
-Current version: {{ version }}
-
-Get Pygments from the Python Package
-Index, or install it with:
-
-  pip install Pygments
-{% endif %}
-
-Questions? Suggestions?
-
-Clone at Bitbucket
-or come to the #pocoo channel on FreeNode.
-
-You can also open an issue at the
-  tracker.
- - - diff --git a/doc/_themes/pygments14/layout.html b/doc/_themes/pygments14/layout.html deleted file mode 100644 index 53f8f37f..00000000 --- a/doc/_themes/pygments14/layout.html +++ /dev/null @@ -1,98 +0,0 @@ -{# - sphinxdoc/layout.html - ~~~~~~~~~~~~~~~~~~~~~ - - Sphinx layout template for the sphinxdoc theme. - - :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -#} -{%- extends "basic/layout.html" %} - -{# put the sidebar before the body #} -{% block sidebar1 %}{{ sidebar() }}{% endblock %} -{% block sidebar2 %}{% endblock %} - -{% block relbar1 %}{% endblock %} -{% block relbar2 %}{% endblock %} - -{% block extrahead %} - -{{ super() }} -{%- if not embedded %} - - -{%- endif %} -{% endblock %} - -{% block header %} -
- -{% endblock %} - -{% block footer %} - -
{# closes "outerwrapper" div #} -{% endblock %} - -{% block sidebarrel %} -{% endblock %} - -{% block sidebarsourcelink %} -{% endblock %} diff --git a/doc/_themes/pygments14/static/bodybg.png b/doc/_themes/pygments14/static/bodybg.png deleted file mode 100644 index 46892b80..00000000 Binary files a/doc/_themes/pygments14/static/bodybg.png and /dev/null differ diff --git a/doc/_themes/pygments14/static/docbg.png b/doc/_themes/pygments14/static/docbg.png deleted file mode 100644 index 13e61f32..00000000 Binary files a/doc/_themes/pygments14/static/docbg.png and /dev/null differ diff --git a/doc/_themes/pygments14/static/listitem.png b/doc/_themes/pygments14/static/listitem.png deleted file mode 100644 index e45715f9..00000000 Binary files a/doc/_themes/pygments14/static/listitem.png and /dev/null differ diff --git a/doc/_themes/pygments14/static/logo.png b/doc/_themes/pygments14/static/logo.png deleted file mode 100644 index 2c1a24dc..00000000 Binary files a/doc/_themes/pygments14/static/logo.png and /dev/null differ diff --git a/doc/_themes/pygments14/static/pocoo.png b/doc/_themes/pygments14/static/pocoo.png deleted file mode 100644 index 41741494..00000000 Binary files a/doc/_themes/pygments14/static/pocoo.png and /dev/null differ diff --git a/doc/_themes/pygments14/static/pygments14.css_t b/doc/_themes/pygments14/static/pygments14.css_t deleted file mode 100644 index 72ca942e..00000000 --- a/doc/_themes/pygments14/static/pygments14.css_t +++ /dev/null @@ -1,401 +0,0 @@ -/* - * pygments14.css - * ~~~~~~~~~~~~~~ - * - * Sphinx stylesheet -- pygments14 theme. Heavily copied from sphinx13. - * - * :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', - 'Verdana', sans-serif; - font-size: 14px; - text-align: center; - background-image: url(bodybg.png); - background-color: {{ theme_background }}; - color: black; - padding: 0; - /* - border-right: 1px solid {{ theme_border }}; - border-left: 1px solid {{ theme_border }}; - */ - - margin: 0 auto; - min-width: 780px; - max-width: 1080px; -} - -.outerwrapper { - background-image: url(docbg.png); - background-attachment: fixed; -} - -.pageheader { - text-align: left; - padding: 10px 15px; -} - -.pageheader ul { - float: right; - color: white; - list-style-type: none; - padding-left: 0; - margin-top: 40px; - margin-right: 10px; -} - -.pageheader li { - float: left; - margin: 0 0 0 10px; -} - -.pageheader li a { - border-radius: 3px; - padding: 8px 12px; - color: {{ theme_darkgray }}; - text-shadow: 0 0 5px rgba(0, 0, 0, 0.2); -} - -.pageheader li a:hover { - background-color: {{ theme_yellow }}; - color: black; - text-shadow: none; -} - -div.document { - text-align: left; - /*border-left: 1em solid {{ theme_lightyellow }};*/ -} - -div.bodywrapper { - margin: 0 12px 0 240px; - background-color: white; -/* border-right: 1px solid {{ theme_border }}; */ -} - -div.body { - margin: 0; - padding: 0.5em 20px 20px 20px; -} - -div.related { - font-size: 1em; - color: {{ theme_darkgray }}; -} - -div.related ul { - background-image: url(relbg.png); - background-repeat: repeat-y; - background-color: {{ theme_yellow }}; - height: 1.9em; - /* - border-top: 1px solid {{ theme_border }}; - border-bottom: 1px solid {{ theme_border }}; - */ -} - -div.related ul li { - margin: 0 5px 0 0; - padding: 
0; - float: left; -} - -div.related ul li.right { - float: right; - margin-right: 5px; -} - -div.related ul li a { - margin: 0; - padding: 0 5px 0 5px; - line-height: 1.75em; - color: {{ theme_darkgray }}; - /*text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5);*/ -} - -div.related ul li a:hover { - text-decoration: underline; - text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5); -} - -div.sphinxsidebarwrapper { - position: relative; - top: 0px; - padding: 0; -} - -div.sphinxsidebar { - margin: 0; - padding: 0 0px 15px 15px; - width: 210px; - float: left; - font-size: 1em; - text-align: left; -} - -div.sphinxsidebar .logo { - font-size: 1.8em; - color: #666; - font-weight: 300; - text-align: center; -} - -div.sphinxsidebar .logo img { - vertical-align: middle; -} - -div.sphinxsidebar input { - border: 1px solid #aaa; - font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', - 'Verdana', sans-serif; - font-size: 1em; -} - -div.sphinxsidebar h3 { - font-size: 1.5em; - /* border-top: 1px solid {{ theme_border }}; */ - margin-top: 1em; - margin-bottom: 0.5em; - padding-top: 0.5em; -} - -div.sphinxsidebar h4 { - font-size: 1.2em; - margin-bottom: 0; -} - -div.sphinxsidebar h3, div.sphinxsidebar h4 { - margin-right: -15px; - margin-left: -15px; - padding-right: 14px; - padding-left: 14px; - color: #333; - font-weight: 300; - /*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/ -} - -div.sphinxsidebarwrapper > h3:first-child { - margin-top: 0.5em; - border: none; -} - -div.sphinxsidebar h3 a { - color: #333; -} - -div.sphinxsidebar ul { - color: #444; - margin-top: 7px; - padding: 0; - line-height: 130%; -} - -div.sphinxsidebar ul ul { - margin-left: 20px; - list-style-image: url(listitem.png); -} - -div.footer { - color: {{ theme_darkgray }}; - text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8); - padding: 2em; - text-align: center; - clear: both; - font-size: 0.8em; -} - -/* -- body styles ----------------------------------------------------------- */ - -p { - margin: 0.8em 0 0.5em 0; -} - -a { - color: {{ theme_darkgreen }}; - text-decoration: none; -} - -a:hover { - color: {{ theme_darkyellow }}; -} - -div.body a { - text-decoration: underline; -} - -h1 { - margin: 10px 0 0 0; - font-size: 2.4em; - color: {{ theme_darkgray }}; - font-weight: 300; -} - -h2 { - margin: 1.em 0 0.2em 0; - font-size: 1.5em; - font-weight: 300; - padding: 0; - color: {{ theme_darkgreen }}; -} - -h3 { - margin: 1em 0 -0.3em 0; - font-size: 1.3em; - font-weight: 300; -} - -div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { - text-decoration: none; -} - -div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt { - color: {{ theme_darkgreen }} !important; - font-size: inherit !important; -} - -a.headerlink { - color: {{ theme_green }} !important; - font-size: 12px; - margin-left: 6px; - padding: 0 4px 0 4px; - text-decoration: none !important; - float: right; -} - -a.headerlink:hover { - background-color: #ccc; - color: white!important; -} - -cite, code, tt { - font-family: 'Consolas', 'DejaVu Sans Mono', - 'Bitstream Vera Sans Mono', monospace; - font-size: 14px; - letter-spacing: -0.02em; -} - -tt { - background-color: #f2f2f2; - border: 1px solid #ddd; - border-radius: 2px; - color: #333; - padding: 1px; -} - -tt.descname, tt.descclassname, tt.xref { - border: 0; -} - -hr { - border: 1px solid #abc; - margin: 2em; -} - -a tt { - border: 0; - color: {{ theme_darkgreen }}; -} - -a tt:hover { - color: {{ theme_darkyellow 
}}; -} - -pre { - font-family: 'Consolas', 'DejaVu Sans Mono', - 'Bitstream Vera Sans Mono', monospace; - font-size: 13px; - letter-spacing: 0.015em; - line-height: 120%; - padding: 0.5em; - border: 1px solid #ccc; - border-radius: 2px; - background-color: #f8f8f8; -} - -pre a { - color: inherit; - text-decoration: underline; -} - -td.linenos pre { - padding: 0.5em 0; -} - -div.quotebar { - background-color: #f8f8f8; - max-width: 250px; - float: right; - padding: 0px 7px; - border: 1px solid #ccc; - margin-left: 1em; -} - -div.topic { - background-color: #f8f8f8; -} - -table { - border-collapse: collapse; - margin: 0 -0.5em 0 -0.5em; -} - -table td, table th { - padding: 0.2em 0.5em 0.2em 0.5em; -} - -div.admonition, div.warning { - font-size: 0.9em; - margin: 1em 0 1em 0; - border: 1px solid #86989B; - border-radius: 2px; - background-color: #f7f7f7; - padding: 0; -} - -div.admonition p, div.warning p { - margin: 0.5em 1em 0.5em 1em; - padding: 0; -} - -div.admonition pre, div.warning pre { - margin: 0.4em 1em 0.4em 1em; -} - -div.admonition p.admonition-title, -div.warning p.admonition-title { - margin-top: 1em; - padding-top: 0.5em; - font-weight: bold; -} - -div.warning { - border: 1px solid #940000; -/* background-color: #FFCCCF;*/ -} - -div.warning p.admonition-title { -} - -div.admonition ul, div.admonition ol, -div.warning ul, div.warning ol { - margin: 0.1em 0.5em 0.5em 3em; - padding: 0; -} - -.viewcode-back { - font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', - 'Verdana', sans-serif; -} - -div.viewcode-block:target { - background-color: #f4debf; - border-top: 1px solid #ac9; - border-bottom: 1px solid #ac9; -} diff --git a/doc/_themes/pygments14/theme.conf b/doc/_themes/pygments14/theme.conf deleted file mode 100644 index fffe66d6..00000000 --- a/doc/_themes/pygments14/theme.conf +++ /dev/null @@ -1,15 +0,0 @@ -[theme] -inherit = basic -stylesheet = pygments14.css -pygments_style = friendly - -[options] -green = #66b55e -darkgreen = #36852e -darkgray = #666666 -border = #66b55e -yellow = #f4cd00 -darkyellow = #d4ad00 -lightyellow = #fffbe3 -background = #f9f9f9 -font = PT Sans diff --git a/doc/conf.py b/doc/conf.py deleted file mode 100644 index 00db7d9b..00000000 --- a/doc/conf.py +++ /dev/null @@ -1,241 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Pygments documentation build configuration file -# - -import sys, os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) - -import pygments - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'pygments.sphinxext'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'Pygments' -copyright = u'2015, Georg Brandl' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = pygments.__version__ -# The full version, including alpha/beta/rc tags. -release = version - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -#pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'pygments14' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['_themes'] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -html_favicon = '_static/favicon.ico' - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -html_sidebars = {'index': ['indexsidebar.html'], - 'docs/*': ['docssidebar.html']} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. 
-#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'Pygmentsdoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'Pygments.tex', u'Pygments Documentation', - u'Georg Brandl', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'pygments', u'Pygments Documentation', - [u'Georg Brandl'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'Pygments', u'Pygments Documentation', - u'Georg Brandl', 'Pygments', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - - -# Example configuration for intersphinx: refer to the Python standard library. -#intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/doc/docs/api.rst b/doc/docs/api.rst deleted file mode 100644 index a6b242dd..00000000 --- a/doc/docs/api.rst +++ /dev/null @@ -1,354 +0,0 @@ -.. -*- mode: rst -*- - -===================== -The full Pygments API -===================== - -This page describes the Pygments API. 
- -High-level API -============== - -.. module:: pygments - -Functions from the :mod:`pygments` module: - -.. function:: lex(code, lexer) - - Lex `code` with the `lexer` (must be a `Lexer` instance) - and return an iterable of tokens. Currently, this only calls - `lexer.get_tokens()`. - -.. function:: format(tokens, formatter, outfile=None) - - Format a token stream (iterable of tokens) `tokens` with the - `formatter` (must be a `Formatter` instance). The result is - written to `outfile`, or if that is ``None``, returned as a - string. - -.. function:: highlight(code, lexer, formatter, outfile=None) - - This is the most high-level highlighting function. - It combines `lex` and `format` in one function. - - -.. module:: pygments.lexers - -Functions from :mod:`pygments.lexers`: - -.. function:: get_lexer_by_name(alias, **options) - - Return an instance of a `Lexer` subclass that has `alias` in its - aliases list. The lexer is given the `options` at its - instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is - found. - -.. function:: get_lexer_for_filename(fn, **options) - - Return a `Lexer` subclass instance that has a filename pattern - matching `fn`. The lexer is given the `options` at its - instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename - is found. - -.. function:: get_lexer_for_mimetype(mime, **options) - - Return a `Lexer` subclass instance that has `mime` in its mimetype - list. The lexer is given the `options` at its instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if not lexer for that mimetype - is found. - -.. function:: load_lexer_from_file(filename, lexername="CustomLexer", **options) - - Return a `Lexer` subclass instance loaded from the provided file, relative - to the current directory. The file is expected to contain a Lexer class - named `lexername` (by default, CustomLexer). Users should be very careful with - the input, because this method is equivalent to running eval on the input file. - The lexer is given the `options` at its instantiation. - - :exc:`ClassNotFound` is raised if there are any errors loading the Lexer - - .. versionadded:: 2.2 - -.. function:: guess_lexer(text, **options) - - Return a `Lexer` subclass instance that's guessed from the text in - `text`. For that, the :meth:`.analyse_text()` method of every known lexer - class is called with the text as argument, and the lexer which returned the - highest value will be instantiated and returned. - - :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can - handle the content. - -.. function:: guess_lexer_for_filename(filename, text, **options) - - As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames` - or `alias_filenames` that matches `filename` are taken into consideration. - - :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can - handle the content. - -.. function:: get_all_lexers() - - Return an iterable over all registered lexers, yielding tuples in the - format:: - - (longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes) - - .. versionadded:: 0.6 - -.. function:: find_lexer_class_by_name(alias) - - Return the `Lexer` subclass that has `alias` in its aliases list, without - instantiating it. - - Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is - found. - - .. versionadded:: 2.2 - -.. 
function:: find_lexer_class(name) - - Return the `Lexer` subclass that with the *name* attribute as given by - the *name* argument. - - -.. module:: pygments.formatters - -Functions from :mod:`pygments.formatters`: - -.. function:: get_formatter_by_name(alias, **options) - - Return an instance of a :class:`.Formatter` subclass that has `alias` in its - aliases list. The formatter is given the `options` at its instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that - alias is found. - -.. function:: get_formatter_for_filename(fn, **options) - - Return a :class:`.Formatter` subclass instance that has a filename pattern - matching `fn`. The formatter is given the `options` at its instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename - is found. - -.. function:: load_formatter_from_file(filename, formattername="CustomFormatter", **options) - - Return a `Formatter` subclass instance loaded from the provided file, relative - to the current directory. The file is expected to contain a Formatter class - named ``formattername`` (by default, CustomFormatter). Users should be very - careful with the input, because this method is equivalent to running eval - on the input file. The formatter is given the `options` at its instantiation. - - :exc:`ClassNotFound` is raised if there are any errors loading the Formatter - - .. versionadded:: 2.2 - -.. module:: pygments.styles - -Functions from :mod:`pygments.styles`: - -.. function:: get_style_by_name(name) - - Return a style class by its short name. The names of the builtin styles - are listed in :data:`pygments.styles.STYLE_MAP`. - - Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is - found. - -.. function:: get_all_styles() - - Return an iterable over all registered styles, yielding their names. - - .. versionadded:: 0.6 - - -.. module:: pygments.lexer - -Lexers -====== - -The base lexer class from which all lexers are derived is: - -.. class:: Lexer(**options) - - The constructor takes a \*\*keywords dictionary of options. - Every subclass must first process its own options and then call - the `Lexer` constructor, since it processes the `stripnl`, - `stripall` and `tabsize` options. - - An example looks like this: - - .. sourcecode:: python - - def __init__(self, **options): - self.compress = options.get('compress', '') - Lexer.__init__(self, **options) - - As these options must all be specifiable as strings (due to the - command line usage), there are various utility functions - available to help with that, see `Option processing`_. - - .. method:: get_tokens(text) - - This method is the basic interface of a lexer. It is called by - the `highlight()` function. It must process the text and return an - iterable of ``(tokentype, value)`` pairs from `text`. - - Normally, you don't need to override this method. The default - implementation processes the `stripnl`, `stripall` and `tabsize` - options and then yields all tokens from `get_tokens_unprocessed()`, - with the ``index`` dropped. - - .. method:: get_tokens_unprocessed(text) - - This method should process the text and return an iterable of - ``(index, tokentype, value)`` tuples where ``index`` is the starting - position of the token within the input text. - - This method must be overridden by subclasses. - - .. staticmethod:: analyse_text(text) - - A static method which is called for lexer guessing. It should analyse - the text and return a float in the range from ``0.0`` to ``1.0``. 
- If it returns ``0.0``, the lexer will not be selected as the most - probable one, if it returns ``1.0``, it will be selected immediately. - - .. note:: You don't have to add ``@staticmethod`` to the definition of - this method, this will be taken care of by the Lexer's metaclass. - - For a list of known tokens have a look at the :doc:`tokens` page. - - A lexer also can have the following attributes (in fact, they are mandatory - except `alias_filenames`) that are used by the builtin lookup mechanism. - - .. attribute:: name - - Full name for the lexer, in human-readable form. - - .. attribute:: aliases - - A list of short, unique identifiers that can be used to lookup - the lexer from a list, e.g. using `get_lexer_by_name()`. - - .. attribute:: filenames - - A list of `fnmatch` patterns that match filenames which contain - content for this lexer. The patterns in this list should be unique among - all lexers. - - .. attribute:: alias_filenames - - A list of `fnmatch` patterns that match filenames which may or may not - contain content for this lexer. This list is used by the - :func:`.guess_lexer_for_filename()` function, to determine which lexers - are then included in guessing the correct one. That means that - e.g. every lexer for HTML and a template language should include - ``\*.html`` in this list. - - .. attribute:: mimetypes - - A list of MIME types for content that can be lexed with this - lexer. - - -.. module:: pygments.formatter - -Formatters -========== - -A formatter is derived from this class: - - -.. class:: Formatter(**options) - - As with lexers, this constructor processes options and then must call the - base class :meth:`__init__`. - - The :class:`Formatter` class recognizes the options `style`, `full` and - `title`. It is up to the formatter class whether it uses them. - - .. method:: get_style_defs(arg='') - - This method must return statements or declarations suitable to define - the current style for subsequent highlighted text (e.g. CSS classes - in the `HTMLFormatter`). - - The optional argument `arg` can be used to modify the generation and - is formatter dependent (it is standardized because it can be given on - the command line). - - This method is called by the ``-S`` :doc:`command-line option `, - the `arg` is then given by the ``-a`` option. - - .. method:: format(tokensource, outfile) - - This method must format the tokens from the `tokensource` iterable and - write the formatted version to the file object `outfile`. - - Formatter options can control how exactly the tokens are converted. - - .. versionadded:: 0.7 - A formatter must have the following attributes that are used by the - builtin lookup mechanism. - - .. attribute:: name - - Full name for the formatter, in human-readable form. - - .. attribute:: aliases - - A list of short, unique identifiers that can be used to lookup - the formatter from a list, e.g. using :func:`.get_formatter_by_name()`. - - .. attribute:: filenames - - A list of :mod:`fnmatch` patterns that match filenames for which this - formatter can produce output. The patterns in this list should be unique - among all formatters. - - -.. module:: pygments.util - -Option processing -================= - -The :mod:`pygments.util` module has some utility functions usable for option -processing: - -.. exception:: OptionError - - This exception will be raised by all option processing functions if - the type or value of the argument is not correct. - -.. 
function:: get_bool_opt(options, optname, default=None) - - Interpret the key `optname` from the dictionary `options` as a boolean and - return it. Return `default` if `optname` is not in `options`. - - The valid string values for ``True`` are ``1``, ``yes``, ``true`` and - ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off`` - (matched case-insensitively). - -.. function:: get_int_opt(options, optname, default=None) - - As :func:`get_bool_opt`, but interpret the value as an integer. - -.. function:: get_list_opt(options, optname, default=None) - - If the key `optname` from the dictionary `options` is a string, - split it at whitespace and return it. If it is already a list - or a tuple, it is returned as a list. - -.. function:: get_choice_opt(options, optname, allowed, default=None) - - If the key `optname` from the dictionary is not in the sequence - `allowed`, raise an error, otherwise return it. - - .. versionadded:: 0.8 diff --git a/doc/docs/authors.rst b/doc/docs/authors.rst deleted file mode 100644 index f8373f0a..00000000 --- a/doc/docs/authors.rst +++ /dev/null @@ -1,4 +0,0 @@ -Full contributor list -===================== - -.. include:: ../../AUTHORS diff --git a/doc/docs/changelog.rst b/doc/docs/changelog.rst deleted file mode 100644 index f264cab0..00000000 --- a/doc/docs/changelog.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../CHANGES diff --git a/doc/docs/cmdline.rst b/doc/docs/cmdline.rst deleted file mode 100644 index e4f94ea5..00000000 --- a/doc/docs/cmdline.rst +++ /dev/null @@ -1,166 +0,0 @@ -.. -*- mode: rst -*- - -====================== -Command Line Interface -====================== - -You can use Pygments from the shell, provided you installed the -:program:`pygmentize` script:: - - $ pygmentize test.py - print "Hello World" - -will print the file test.py to standard output, using the Python lexer -(inferred from the file name extension) and the terminal formatter (because -you didn't give an explicit formatter name). - -If you want HTML output:: - - $ pygmentize -f html -l python -o test.html test.py - -As you can see, the -l option explicitly selects a lexer. As seen above, if you -give an input file name and it has an extension that Pygments recognizes, you can -omit this option. - -The ``-o`` option gives an output file name. If it is not given, output is -written to stdout. - -The ``-f`` option selects a formatter (as with ``-l``, it can also be omitted -if an output file name is given and has a supported extension). -If no output file name is given and ``-f`` is omitted, the -:class:`.TerminalFormatter` is used. - -The above command could therefore also be given as:: - - $ pygmentize -o test.html test.py - -To create a full HTML document, including line numbers and stylesheet (using the -"emacs" style), highlighting the Python file ``test.py`` to ``test.html``:: - - $ pygmentize -O full,style=emacs -o test.html test.py - - -Options and filters -------------------- - -Lexer and formatter options can be given using the ``-O`` option:: - - $ pygmentize -f html -O style=colorful,linenos=1 -l python test.py - -Be sure to enclose the option string in quotes if it contains any special shell -characters, such as spaces or expansion wildcards like ``*``. If an option -expects a list value, separate the list entries with spaces (you'll have to -quote the option value in this case too, so that the shell doesn't split it). 
- -Since the ``-O`` option argument is split at commas and expects the split values -to be of the form ``name=value``, you can't give an option value that contains -commas or equals signs. Therefore, an option ``-P`` is provided (as of Pygments -0.9) that works like ``-O`` but can only pass one option per ``-P``. Its value -can then contain all characters:: - - $ pygmentize -P "heading=Pygments, the Python highlighter" ... - -Filters are added to the token stream using the ``-F`` option:: - - $ pygmentize -f html -l pascal -F keywordcase:case=upper main.pas - -As you see, options for the filter are given after a colon. As for ``-O``, the -filter name and options must be one shell word, so there may not be any spaces -around the colon. - - -Generating styles ------------------ - -Formatters normally don't output full style information. For example, the HTML -formatter by default only outputs ```` tags with ``class`` attributes. -Therefore, there's a special ``-S`` option for generating style definitions. -Usage is as follows:: - - $ pygmentize -f html -S colorful -a .syntax - -generates a CSS style sheet (because you selected the HTML formatter) for -the "colorful" style prepending a ".syntax" selector to all style rules. - -For an explanation what ``-a`` means for :doc:`a particular formatter -`, look for the `arg` argument for the formatter's -:meth:`.get_style_defs()` method. - - -Getting lexer names -------------------- - -.. versionadded:: 1.0 - -The ``-N`` option guesses a lexer name for a given filename, so that :: - - $ pygmentize -N setup.py - -will print out ``python``. It won't highlight anything yet. If no specific -lexer is known for that filename, ``text`` is printed. - -Custom Lexers and Formatters ----------------------------- - -.. versionadded:: 2.2 - -The ``-x`` flag enables custom lexers and formatters to be loaded -from files relative to the current directory. Create a file with a class named -CustomLexer or CustomFormatter, then specify it on the command line:: - - $ pygmentize -l your_lexer.py -f your_formatter.py -x - -You can also specify the name of your class with a colon:: - - $ pygmentize -l your_lexer.py:SomeLexer -x - -For more information, see :doc:`the Pygments documentation on Lexer development -`. - -Getting help ------------- - -The ``-L`` option lists lexers, formatters, along with their short -names and supported file name extensions, styles and filters. If you want to see -only one category, give it as an argument:: - - $ pygmentize -L filters - -will list only all installed filters. - -The ``-H`` option will give you detailed information (the same that can be found -in this documentation) about a lexer, formatter or filter. Usage is as follows:: - - $ pygmentize -H formatter html - -will print the help for the HTML formatter, while :: - - $ pygmentize -H lexer python - -will print the help for the Python lexer, etc. - - -A note on encodings -------------------- - -.. versionadded:: 0.9 - -Pygments tries to be smart regarding encodings in the formatting process: - -* If you give an ``encoding`` option, it will be used as the input and - output encoding. - -* If you give an ``outencoding`` option, it will override ``encoding`` - as the output encoding. - -* If you give an ``inencoding`` option, it will override ``encoding`` - as the input encoding. - -* If you don't give an encoding and have given an output file, the default - encoding for lexer and formatter is the terminal encoding or the default - locale encoding of the system. 
As a last resort, ``latin1`` is used (which - will pass through all non-ASCII characters). - -* If you don't give an encoding and haven't given an output file (that means - output is written to the console), the default encoding for lexer and - formatter is the terminal encoding (``sys.stdout.encoding``). diff --git a/doc/docs/filterdevelopment.rst b/doc/docs/filterdevelopment.rst deleted file mode 100644 index fbcd0a09..00000000 --- a/doc/docs/filterdevelopment.rst +++ /dev/null @@ -1,71 +0,0 @@ -.. -*- mode: rst -*- - -===================== -Write your own filter -===================== - -.. versionadded:: 0.7 - -Writing own filters is very easy. All you have to do is to subclass -the `Filter` class and override the `filter` method. Additionally a -filter is instantiated with some keyword arguments you can use to -adjust the behavior of your filter. - - -Subclassing Filters -=================== - -As an example, we write a filter that converts all `Name.Function` tokens -to normal `Name` tokens to make the output less colorful. - -.. sourcecode:: python - - from pygments.util import get_bool_opt - from pygments.token import Name - from pygments.filter import Filter - - class UncolorFilter(Filter): - - def __init__(self, **options): - Filter.__init__(self, **options) - self.class_too = get_bool_opt(options, 'classtoo') - - def filter(self, lexer, stream): - for ttype, value in stream: - if ttype is Name.Function or (self.class_too and - ttype is Name.Class): - ttype = Name - yield ttype, value - -Some notes on the `lexer` argument: that can be quite confusing since it doesn't -need to be a lexer instance. If a filter was added by using the `add_filter()` -function of lexers, that lexer is registered for the filter. In that case -`lexer` will refer to the lexer that has registered the filter. It *can* be used -to access options passed to a lexer. Because it could be `None` you always have -to check for that case if you access it. - - -Using a decorator -================= - -You can also use the `simplefilter` decorator from the `pygments.filter` module: - -.. sourcecode:: python - - from pygments.util import get_bool_opt - from pygments.token import Name - from pygments.filter import simplefilter - - - @simplefilter - def uncolor(self, lexer, stream, options): - class_too = get_bool_opt(options, 'classtoo') - for ttype, value in stream: - if ttype is Name.Function or (class_too and - ttype is Name.Class): - ttype = Name - yield ttype, value - -The decorator automatically subclasses an internal filter class and uses the -decorated function as a method for filtering. (That's why there is a `self` -argument that you probably won't end up using in the method.) diff --git a/doc/docs/filters.rst b/doc/docs/filters.rst deleted file mode 100644 index ff2519a3..00000000 --- a/doc/docs/filters.rst +++ /dev/null @@ -1,41 +0,0 @@ -.. -*- mode: rst -*- - -======= -Filters -======= - -.. versionadded:: 0.7 - -You can filter token streams coming from lexers to improve or annotate the -output. For example, you can highlight special words in comments, convert -keywords to upper or lowercase to enforce a style guide etc. - -To apply a filter, you can use the `add_filter()` method of a lexer: - -.. 
sourcecode:: pycon - - >>> from pygments.lexers import PythonLexer - >>> l = PythonLexer() - >>> # add a filter given by a string and options - >>> l.add_filter('codetagify', case='lower') - >>> l.filters - [] - >>> from pygments.filters import KeywordCaseFilter - >>> # or give an instance - >>> l.add_filter(KeywordCaseFilter(case='lower')) - -The `add_filter()` method takes keyword arguments which are forwarded to -the constructor of the filter. - -To get a list of all registered filters by name, you can use the -`get_all_filters()` function from the `pygments.filters` module that returns an -iterable for all known filters. - -If you want to write your own filter, have a look at :doc:`Write your own filter -`. - - -Builtin Filters -=============== - -.. pygmentsdoc:: filters diff --git a/doc/docs/formatterdevelopment.rst b/doc/docs/formatterdevelopment.rst deleted file mode 100644 index 2bfac05c..00000000 --- a/doc/docs/formatterdevelopment.rst +++ /dev/null @@ -1,169 +0,0 @@ -.. -*- mode: rst -*- - -======================== -Write your own formatter -======================== - -As well as creating :doc:`your own lexer `, writing a new -formatter for Pygments is easy and straightforward. - -A formatter is a class that is initialized with some keyword arguments (the -formatter options) and that must provides a `format()` method. -Additionally a formatter should provide a `get_style_defs()` method that -returns the style definitions from the style in a form usable for the -formatter's output format. - - -Quickstart -========== - -The most basic formatter shipped with Pygments is the `NullFormatter`. It just -sends the value of a token to the output stream: - -.. sourcecode:: python - - from pygments.formatter import Formatter - - class NullFormatter(Formatter): - def format(self, tokensource, outfile): - for ttype, value in tokensource: - outfile.write(value) - -As you can see, the `format()` method is passed two parameters: `tokensource` -and `outfile`. The first is an iterable of ``(token_type, value)`` tuples, -the latter a file like object with a `write()` method. - -Because the formatter is that basic it doesn't overwrite the `get_style_defs()` -method. - - -Styles -====== - -Styles aren't instantiated but their metaclass provides some class functions -so that you can access the style definitions easily. - -Styles are iterable and yield tuples in the form ``(ttype, d)`` where `ttype` -is a token and `d` is a dict with the following keys: - -``'color'`` - Hexadecimal color value (eg: ``'ff0000'`` for red) or `None` if not - defined. - -``'bold'`` - `True` if the value should be bold - -``'italic'`` - `True` if the value should be italic - -``'underline'`` - `True` if the value should be underlined - -``'bgcolor'`` - Hexadecimal color value for the background (eg: ``'eeeeeee'`` for light - gray) or `None` if not defined. - -``'border'`` - Hexadecimal color value for the border (eg: ``'0000aa'`` for a dark - blue) or `None` for no border. - -Additional keys might appear in the future, formatters should ignore all keys -they don't support. - - -HTML 3.2 Formatter -================== - -For an more complex example, let's implement a HTML 3.2 Formatter. We don't -use CSS but inline markup (````, ````, etc). Because this isn't good -style this formatter isn't in the standard library ;-) - -.. 
sourcecode:: python - - from pygments.formatter import Formatter - - class OldHtmlFormatter(Formatter): - - def __init__(self, **options): - Formatter.__init__(self, **options) - - # create a dict of (start, end) tuples that wrap the - # value of a token so that we can use it in the format - # method later - self.styles = {} - - # we iterate over the `_styles` attribute of a style item - # that contains the parsed style values. - for token, style in self.style: - start = end = '' - # a style item is a tuple in the following form: - # colors are readily specified in hex: 'RRGGBB' - if style['color']: - start += '' % style['color'] - end = '' + end - if style['bold']: - start += '' - end = '' + end - if style['italic']: - start += '' - end = '' + end - if style['underline']: - start += '' - end = '' + end - self.styles[token] = (start, end) - - def format(self, tokensource, outfile): - # lastval is a string we use for caching - # because it's possible that an lexer yields a number - # of consecutive tokens with the same token type. - # to minimize the size of the generated html markup we - # try to join the values of same-type tokens here - lastval = '' - lasttype = None - - # wrap the whole output with
-            outfile.write('<pre>')
-
-            for ttype, value in tokensource:
-                # if the token type doesn't exist in the stylemap
-                # we try it with the parent of the token type
-                # eg: parent of Token.Literal.String.Double is
-                # Token.Literal.String
-                while ttype not in self.styles:
-                    ttype = ttype.parent
-                if ttype == lasttype:
-                    # the current token type is the same as in the last
-                    # iteration. cache it
-                    lastval += value
-                else:
-                    # not the same token as last iteration, but we
-                    # have some data in the buffer. wrap it with the
-                    # defined style and write it to the output file
-                    if lastval:
-                        stylebegin, styleend = self.styles[lasttype]
-                        outfile.write(stylebegin + lastval + styleend)
-                    # set lastval/lasttype to current values
-                    lastval = value
-                    lasttype = ttype
-
-            # if something is left in the buffer, write it to the
-            # output file, then close the opened <pre> tag
-            if lastval:
-                stylebegin, styleend = self.styles[lasttype]
-                outfile.write(stylebegin + lastval + styleend)
-            outfile.write('
\n') - -The comments should explain it. Again, this formatter doesn't override the -`get_style_defs()` method. If we would have used CSS classes instead of -inline HTML markup, we would need to generate the CSS first. For that -purpose the `get_style_defs()` method exists: - - -Generating Style Definitions -============================ - -Some formatters like the `LatexFormatter` and the `HtmlFormatter` don't -output inline markup but reference either macros or css classes. Because -the definitions of those are not part of the output, the `get_style_defs()` -method exists. It is passed one parameter (if it's used and how it's used -is up to the formatter) and has to return a string or ``None``. diff --git a/doc/docs/formatters.rst b/doc/docs/formatters.rst deleted file mode 100644 index 9e7074e8..00000000 --- a/doc/docs/formatters.rst +++ /dev/null @@ -1,48 +0,0 @@ -.. -*- mode: rst -*- - -==================== -Available formatters -==================== - -This page lists all builtin formatters. - -Common options -============== - -All formatters support these options: - -`encoding` - If given, must be an encoding name (such as ``"utf-8"``). This will - be used to convert the token strings (which are Unicode strings) - to byte strings in the output (default: ``None``). - It will also be written in an encoding declaration suitable for the - document format if the `full` option is given (e.g. a ``meta - content-type`` directive in HTML or an invocation of the `inputenc` - package in LaTeX). - - If this is ``""`` or ``None``, Unicode strings will be written - to the output file, which most file-like objects do not support. - For example, `pygments.highlight()` will return a Unicode string if - called with no `outfile` argument and a formatter that has `encoding` - set to ``None`` because it uses a `StringIO.StringIO` object that - supports Unicode arguments to `write()`. Using a regular file object - wouldn't work. - - .. versionadded:: 0.6 - -`outencoding` - When using Pygments from the command line, any `encoding` option given is - passed to the lexer and the formatter. This is sometimes not desirable, - for example if you want to set the input encoding to ``"guess"``. - Therefore, `outencoding` has been introduced which overrides `encoding` - for the formatter if given. - - .. versionadded:: 0.7 - - -Formatter classes -================= - -All these classes are importable from :mod:`pygments.formatters`. - -.. pygmentsdoc:: formatters diff --git a/doc/docs/index.rst b/doc/docs/index.rst deleted file mode 100644 index 30d5c085..00000000 --- a/doc/docs/index.rst +++ /dev/null @@ -1,66 +0,0 @@ -Pygments documentation -====================== - -**Starting with Pygments** - -.. toctree:: - :maxdepth: 1 - - ../download - quickstart - cmdline - -**Builtin components** - -.. toctree:: - :maxdepth: 1 - - lexers - filters - formatters - styles - -**Reference** - -.. toctree:: - :maxdepth: 1 - - unicode - tokens - api - -**Hacking for Pygments** - -.. toctree:: - :maxdepth: 1 - - lexerdevelopment - formatterdevelopment - filterdevelopment - plugins - -**Hints and tricks** - -.. toctree:: - :maxdepth: 1 - - rstdirective - moinmoin - java - integrate - -**About Pygments** - -.. toctree:: - :maxdepth: 1 - - changelog - authors - - -If you find bugs or have suggestions for the documentation, please look -:ref:`here ` for info on how to contact the team. - -.. XXX You can download an offline version of this documentation from the - :doc:`download page `. 
- diff --git a/doc/docs/integrate.rst b/doc/docs/integrate.rst deleted file mode 100644 index 77daaa43..00000000 --- a/doc/docs/integrate.rst +++ /dev/null @@ -1,40 +0,0 @@ -.. -*- mode: rst -*- - -=================================== -Using Pygments in various scenarios -=================================== - -Markdown --------- - -Since Pygments 0.9, the distribution ships Markdown_ preprocessor sample code -that uses Pygments to render source code in -:file:`external/markdown-processor.py`. You can copy and adapt it to your -liking. - -.. _Markdown: http://www.freewisdom.org/projects/python-markdown/ - -TextMate --------- - -Antonio Cangiano has created a Pygments bundle for TextMate that allows to -colorize code via a simple menu option. It can be found here_. - -.. _here: http://antoniocangiano.com/2008/10/28/pygments-textmate-bundle/ - -Bash completion ---------------- - -The source distribution contains a file ``external/pygments.bashcomp`` that -sets up completion for the ``pygmentize`` command in bash. - -Wrappers for other languages ----------------------------- - -These libraries provide Pygments highlighting for users of other languages -than Python: - -* `pygments.rb `_, a pygments wrapper for Ruby -* `Clygments `_, a pygments wrapper for - Clojure -* `PHPygments `_, a pygments wrapper for PHP diff --git a/doc/docs/java.rst b/doc/docs/java.rst deleted file mode 100644 index f553463c..00000000 --- a/doc/docs/java.rst +++ /dev/null @@ -1,70 +0,0 @@ -===================== -Use Pygments in Java -===================== - -Thanks to `Jython `_ it is possible to use Pygments in -Java. - -This page is a simple tutorial to get an idea of how this works. You can -then look at the `Jython documentation `_ for more -advanced uses. - -Since version 1.5, Pygments is deployed on `Maven Central -`_ as a JAR, as is Jython -which makes it a lot easier to create a Java project. - -Here is an example of a `Maven `_ ``pom.xml`` file for a -project running Pygments: - -.. sourcecode:: xml - - - - - 4.0.0 - example - example - 1.0-SNAPSHOT - - - org.python - jython-standalone - 2.5.3 - - - org.pygments - pygments - 1.5 - runtime - - - - -The following Java example: - -.. sourcecode:: java - - PythonInterpreter interpreter = new PythonInterpreter(); - - // Set a variable with the content you want to work with - interpreter.set("code", code); - - // Simple use Pygments as you would in Python - interpreter.exec("from pygments import highlight\n" - + "from pygments.lexers import PythonLexer\n" - + "from pygments.formatters import HtmlFormatter\n" - + "\nresult = highlight(code, PythonLexer(), HtmlFormatter())"); - - // Get the result that has been set in a variable - System.out.println(interpreter.get("result", String.class)); - -will print something like: - -.. sourcecode:: html - -
-
-print "Hello World"
-
-
diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst deleted file mode 100644 index 63bd01a3..00000000 --- a/doc/docs/lexerdevelopment.rst +++ /dev/null @@ -1,728 +0,0 @@ -.. -*- mode: rst -*- - -.. highlight:: python - -==================== -Write your own lexer -==================== - -If a lexer for your favorite language is missing in the Pygments package, you -can easily write your own and extend Pygments. - -All you need can be found inside the :mod:`pygments.lexer` module. As you can -read in the :doc:`API documentation `, a lexer is a class that is -initialized with some keyword arguments (the lexer options) and that provides a -:meth:`.get_tokens_unprocessed()` method which is given a string or unicode -object with the data to lex. - -The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable -containing tuples in the form ``(index, token, value)``. Normally you don't -need to do this since there are base lexers that do most of the work and that -you can subclass. - - -RegexLexer -========== - -The lexer base class used by almost all of Pygments' lexers is the -:class:`RegexLexer`. This class allows you to define lexing rules in terms of -*regular expressions* for different *states*. - -States are groups of regular expressions that are matched against the input -string at the *current position*. If one of these expressions matches, a -corresponding action is performed (such as yielding a token with a specific -type, or changing state), the current position is set to where the last match -ended and the matching process continues with the first regex of the current -state. - -Lexer states are kept on a stack: each time a new state is entered, the new -state is pushed onto the stack. The most basic lexers (like the `DiffLexer`) -just need one state. - -Each state is defined as a list of tuples in the form (`regex`, `action`, -`new_state`) where the last item is optional. In the most basic form, `action` -is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a -token with the match text and type `tokentype` and push `new_state` on the state -stack. If the new state is ``'#pop'``, the topmost state is popped from the -stack instead. To pop more than one state, use ``'#pop:2'`` and so on. -``'#push'`` is a synonym for pushing the current state on the stack. - -The following example shows the `DiffLexer` from the builtin lexers. Note that -it contains some additional attributes `name`, `aliases` and `filenames` which -aren't required for a lexer. They are used by the builtin lexer lookup -functions. :: - - from pygments.lexer import RegexLexer - from pygments.token import * - - class DiffLexer(RegexLexer): - name = 'Diff' - aliases = ['diff'] - filenames = ['*.diff'] - - tokens = { - 'root': [ - (r' .*\n', Text), - (r'\+.*\n', Generic.Inserted), - (r'-.*\n', Generic.Deleted), - (r'@.*\n', Generic.Subheading), - (r'Index.*\n', Generic.Heading), - (r'=.*\n', Generic.Heading), - (r'.*\n', Text), - ] - } - -As you can see this lexer only uses one state. When the lexer starts scanning -the text, it first checks if the current character is a space. If this is true -it scans everything until newline and returns the data as a `Text` token (which -is the "no special highlighting" token). - -If this rule doesn't match, it checks if the current char is a plus sign. And -so on. 
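 To see what these rules produce, you can feed the lexer a small sample and print the resulting token stream; a minimal sketch using the standard lexer API (the sample text is made up purely for illustration): .. sourcecode:: python from pygments.lexers import DiffLexer sample = "+added line\n-removed line\n context line\n" # get_tokens() yields (tokentype, value) pairs for the whole input for tokentype, value in DiffLexer().get_tokens(sample): print(tokentype, repr(value)) 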
- -If no rule matches at the current position, the current char is emitted as an -`Error` token that indicates a lexing error, and the position is increased by -one. - - -Adding and testing a new lexer -============================== - -The easiest way to use a new lexer is to use Pygments' support for loading -the lexer from a file relative to your current directory. - -First, change the name of your lexer class to CustomLexer: - -.. code-block:: python - - from pygments.lexer import RegexLexer - from pygments.token import * - - class CustomLexer(RegexLexer): - """All your lexer code goes here!""" - -Then you can load the lexer from the command line with the additional -flag ``-x``: - -.. code-block:: console - - $ pygmentize -l your_lexer_file.py -x - -To specify a class name other than CustomLexer, append it with a colon: - -.. code-block:: console - - $ pygmentize -l your_lexer.py:SomeLexer -x - -Or, using the Python API: - -.. code-block:: python - - # For a lexer named CustomLexer - your_lexer = load_lexer_from_file(filename, **options) - - # For a lexer named MyNewLexer - your_named_lexer = load_lexer_from_file(filename, "MyNewLexer", **options) - -When loading custom lexers and formatters, be extremely careful to use only -trusted files; Pygments will perform the equivalent of ``eval`` on them. - -If you only want to use your lexer with the Pygments API, you can import and -instantiate the lexer yourself, then pass it to :func:`pygments.highlight`. - -To prepare your new lexer for inclusion in the Pygments distribution, so that it -will be found when passing filenames or lexer aliases from the command line, you -have to perform the following steps. - -First, change to the current directory containing the Pygments source code. You -will need to have either an unpacked source tarball, or (preferably) a copy -cloned from BitBucket. - -.. code-block:: console - - $ cd .../pygments-main - -Select a matching module under ``pygments/lexers``, or create a new module for -your lexer class. - -Next, make sure the lexer is known from outside of the module. All modules in -the ``pygments.lexers`` package specify ``__all__``. For example, -``esoteric.py`` sets:: - - __all__ = ['BrainfuckLexer', 'BefungeLexer', ...] - -Add the name of your lexer class to this list (or create the list if your lexer -is the only class in the module). - -Finally the lexer can be made publicly known by rebuilding the lexer mapping: - -.. code-block:: console - - $ make mapfiles - -To test the new lexer, store an example file with the proper extension in -``tests/examplefiles``. For example, to test your ``DiffLexer``, add a -``tests/examplefiles/example.diff`` containing a sample diff output. - -Now you can use pygmentize to render your example to HTML: - -.. code-block:: console - - $ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff - -Note that this explicitly calls the ``pygmentize`` in the current directory -by preceding it with ``./``. This ensures your modifications are used. -Otherwise a possibly already installed, unmodified version without your new -lexer would have been called from the system search path (``$PATH``). - -To view the result, open ``/tmp/example.html`` in your browser. - -Once the example renders as expected, you should run the complete test suite: - -.. code-block:: console - - $ make test - -It also tests that your lexer fulfills the lexer API and certain invariants, -such as that the concatenation of all token text is the same as the input text. 
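 During development you can also drive the lexer from Python instead of the command line; a minimal sketch, where ``mylexer`` stands in for whatever module you saved your ``CustomLexer`` class in: .. sourcecode:: python from pygments import highlight from pygments.formatters import HtmlFormatter from mylexer import CustomLexer # hypothetical module containing your lexer with open('tests/examplefiles/example.diff') as f: code = f.read() # render the example file with the work-in-progress lexer as a full HTML page print(highlight(code, CustomLexer(), HtmlFormatter(full=True))) 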
- - -Regex Flags -=========== - -You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or -globally by adding a `flags` attribute to your lexer class. If no attribute is -defined, it defaults to `re.MULTILINE`. For more information about regular -expression flags see the page about `regular expressions`_ in the Python -documentation. - -.. _regular expressions: http://docs.python.org/library/re.html#regular-expression-syntax - - -Scanning multiple tokens at once -================================ - -So far, the `action` element in the rule tuple of regex, action and state has -been a single token type. Now we look at the first of several other possible -values. - -Here is a more complex lexer that highlights INI files. INI files consist of -sections, comments and ``key = value`` pairs:: - - from pygments.lexer import RegexLexer, bygroups - from pygments.token import * - - class IniLexer(RegexLexer): - name = 'INI' - aliases = ['ini', 'cfg'] - filenames = ['*.ini', '*.cfg'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r';.*?$', Comment), - (r'\[.*?\]$', Keyword), - (r'(.*?)(\s*)(=)(\s*)(.*?)$', - bygroups(Name.Attribute, Text, Operator, Text, String)) - ] - } - -The lexer first looks for whitespace, comments and section names. Later it -looks for a line that looks like a key, value pair, separated by an ``'='`` -sign, and optional whitespace. - -The `bygroups` helper yields each capturing group in the regex with a different -token type. First the `Name.Attribute` token, then a `Text` token for the -optional whitespace, after that a `Operator` token for the equals sign. Then a -`Text` token for the whitespace again. The rest of the line is returned as -`String`. - -Note that for this to work, every part of the match must be inside a capturing -group (a ``(...)``), and there must not be any nested capturing groups. If you -nevertheless need a group, use a non-capturing group defined using this syntax: -``(?:some|words|here)`` (note the ``?:`` after the beginning parenthesis). - -If you find yourself needing a capturing group inside the regex which shouldn't -be part of the output but is used in the regular expressions for backreferencing -(eg: ``r'(<(foo|bar)>)(.*?)()'``), you can pass `None` to the bygroups -function and that group will be skipped in the output. - - -Changing states -=============== - -Many lexers need multiple states to work as expected. For example, some -languages allow multiline comments to be nested. Since this is a recursive -pattern it's impossible to lex just using regular expressions. - -Here is a lexer that recognizes C++ style comments (multi-line with ``/* */`` -and single-line with ``//`` until end of line):: - - from pygments.lexer import RegexLexer - from pygments.token import * - - class CppCommentLexer(RegexLexer): - name = 'Example Lexer with states' - - tokens = { - 'root': [ - (r'[^/]+', Text), - (r'/\*', Comment.Multiline, 'comment'), - (r'//.*?$', Comment.Singleline), - (r'/', Text) - ], - 'comment': [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ] - } - -This lexer starts lexing in the ``'root'`` state. It tries to match as much as -possible until it finds a slash (``'/'``). If the next character after the slash -is an asterisk (``'*'``) the `RegexLexer` sends those two characters to the -output stream marked as `Comment.Multiline` and continues lexing with the rules -defined in the ``'comment'`` state. 
- -If there wasn't an asterisk after the slash, the `RegexLexer` checks if it's a -Singleline comment (i.e. followed by a second slash). If this also wasn't the -case it must be a single slash, which is not a comment starter (the separate -regex for a single slash must also be given, else the slash would be marked as -an error token). - -Inside the ``'comment'`` state, we do the same thing again. Scan until the -lexer finds a star or slash. If it's the opening of a multiline comment, push -the ``'comment'`` state on the stack and continue scanning, again in the -``'comment'`` state. Else, check if it's the end of the multiline comment. If -yes, pop one state from the stack. - -Note: If you pop from an empty stack you'll get an `IndexError`. (There is an -easy way to prevent this from happening: don't ``'#pop'`` in the root state). - -If the `RegexLexer` encounters a newline that is flagged as an error token, the -stack is emptied and the lexer continues scanning in the ``'root'`` state. This -can help producing error-tolerant highlighting for erroneous input, e.g. when a -single-line string is not closed. - - -Advanced state tricks -===================== - -There are a few more things you can do with states: - -- You can push multiple states onto the stack if you give a tuple instead of a - simple string as the third item in a rule tuple. For example, if you want to - match a comment containing a directive, something like: - - .. code-block:: text - - /* rest of comment */ - - you can use this rule:: - - tokens = { - 'root': [ - (r'/\* <', Comment, ('comment', 'directive')), - ... - ], - 'directive': [ - (r'[^>]*', Comment.Directive), - (r'>', Comment, '#pop'), - ], - 'comment': [ - (r'[^*]+', Comment), - (r'\*/', Comment, '#pop'), - (r'\*', Comment), - ] - } - - When this encounters the above sample, first ``'comment'`` and ``'directive'`` - are pushed onto the stack, then the lexer continues in the directive state - until it finds the closing ``>``, then it continues in the comment state until - the closing ``*/``. Then, both states are popped from the stack again and - lexing continues in the root state. - - .. versionadded:: 0.9 - The tuple can contain the special ``'#push'`` and ``'#pop'`` (but not - ``'#pop:n'``) directives. - - -- You can include the rules of a state in the definition of another. This is - done by using `include` from `pygments.lexer`:: - - from pygments.lexer import RegexLexer, bygroups, include - from pygments.token import * - - class ExampleLexer(RegexLexer): - tokens = { - 'comments': [ - (r'/\*.*?\*/', Comment), - (r'//.*?\n', Comment), - ], - 'root': [ - include('comments'), - (r'(function )(\w+)( {)', - bygroups(Keyword, Name, Keyword), 'function'), - (r'.', Text), - ], - 'function': [ - (r'[^}/]+', Text), - include('comments'), - (r'/', Text), - (r'\}', Keyword, '#pop'), - ] - } - - This is a hypothetical lexer for a language that consist of functions and - comments. Because comments can occur at toplevel and in functions, we need - rules for comments in both states. As you can see, the `include` helper saves - repeating rules that occur more than once (in this example, the state - ``'comment'`` will never be entered by the lexer, as it's only there to be - included in ``'root'`` and ``'function'``). - -- Sometimes, you may want to "combine" a state from existing ones. This is - possible with the `combined` helper from `pygments.lexer`. 
- - If you, instead of a new state, write ``combined('state1', 'state2')`` as the - third item of a rule tuple, a new anonymous state will be formed from state1 - and state2 and if the rule matches, the lexer will enter this state. - - This is not used very often, but can be helpful in some cases, such as the - `PythonLexer`'s string literal processing. - -- If you want your lexer to start lexing in a different state you can modify the - stack by overriding the `get_tokens_unprocessed()` method:: - - from pygments.lexer import RegexLexer - - class ExampleLexer(RegexLexer): - tokens = {...} - - def get_tokens_unprocessed(self, text, stack=('root', 'otherstate')): - for item in RegexLexer.get_tokens_unprocessed(self, text, stack): - yield item - - Some lexers like the `PhpLexer` use this to make the leading ``', Name.Tag), - ], - 'script-content': [ - (r'(.+?)(<\s*/\s*script\s*>)', - bygroups(using(JavascriptLexer), Name.Tag), - '#pop'), - ] - } - -Here the content of a ```` end tag is processed by the `JavascriptLexer`, -while the end tag is yielded as a normal token with the `Name.Tag` type. - -Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule. -Here, two states are pushed onto the state stack, ``'script-content'`` and -``'tag'``. That means that first ``'tag'`` is processed, which will lex -attributes and the closing ``>``, then the ``'tag'`` state is popped and the -next state on top of the stack will be ``'script-content'``. - -Since you cannot refer to the class currently being defined, use `this` -(imported from `pygments.lexer`) to refer to the current lexer class, i.e. -``using(this)``. This construct may seem unnecessary, but this is often the -most obvious way of lexing arbitrary syntax between fixed delimiters without -introducing deeply nested states. - -The `using()` helper has a special keyword argument, `state`, which works as -follows: if given, the lexer to use initially is not in the ``"root"`` state, -but in the state given by this argument. This does not work with advanced -`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below). - -Any other keywords arguments passed to `using()` are added to the keyword -arguments used to create the lexer. - - -Delegating Lexer -================ - -Another approach for nested lexers is the `DelegatingLexer` which is for example -used for the template engine lexers. It takes two lexers as arguments on -initialisation: a `root_lexer` and a `language_lexer`. - -The input is processed as follows: First, the whole text is lexed with the -`language_lexer`. All tokens yielded with the special type of ``Other`` are -then concatenated and given to the `root_lexer`. The language tokens of the -`language_lexer` are then inserted into the `root_lexer`'s token stream at the -appropriate positions. :: - - from pygments.lexer import DelegatingLexer - from pygments.lexers.web import HtmlLexer, PhpLexer - - class HtmlPhpLexer(DelegatingLexer): - def __init__(self, **options): - super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options) - -This procedure ensures that e.g. HTML with template tags in it is highlighted -correctly even if the template tags are put into HTML tags or attributes. 
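 Such a delegating lexer is then used like any other lexer; a short sketch, reusing the ``HtmlPhpLexer`` defined above on a throwaway snippet: .. sourcecode:: python from pygments import highlight from pygments.formatters import TerminalFormatter code = '
' # the HTML markup and the embedded PHP are each highlighted with their own rules print(highlight(code, HtmlPhpLexer(), TerminalFormatter())) 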
- -If you want to change the needle token ``Other`` to something else, you can give -the lexer another token type as the third parameter:: - - DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options) - - -Callbacks -========= - -Sometimes the grammar of a language is so complex that a lexer would be unable -to process it just by using regular expressions and stacks. - -For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead -of token types (`bygroups` and `using` are nothing else but preimplemented -callbacks). The callback must be a function taking two arguments: - -* the lexer itself -* the match object for the last matched rule - -The callback must then return an iterable of (or simply yield) ``(index, -tokentype, value)`` tuples, which are then just passed through by -`get_tokens_unprocessed()`. The ``index`` here is the position of the token in -the input string, ``tokentype`` is the normal token type (like `Name.Builtin`), -and ``value`` the associated part of the input string. - -You can see an example here:: - - from pygments.lexer import RegexLexer - from pygments.token import Generic - - class HypotheticLexer(RegexLexer): - - def headline_callback(lexer, match): - equal_signs = match.group(1) - text = match.group(2) - yield match.start(), Generic.Headline, equal_signs + text + equal_signs - - tokens = { - 'root': [ - (r'(=+)(.*?)(\1)', headline_callback) - ] - } - -If the regex for the `headline_callback` matches, the function is called with -the match object. Note that after the callback is done, processing continues -normally, that is, after the end of the previous match. The callback has no -possibility to influence the position. - -There are not really any simple examples for lexer callbacks, but you can see -them in action e.g. in the `SMLLexer` class in `ml.py`_. - -.. _ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py - - -The ExtendedRegexLexer class -============================ - -The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for -the funky syntax rules of languages such as Ruby. - -But fear not; even then you don't have to abandon the regular expression -approach: Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`. -All features known from RegexLexers are available here too, and the tokens are -specified in exactly the same way, *except* for one detail: - -The `get_tokens_unprocessed()` method holds its internal state data not as local -variables, but in an instance of the `pygments.lexer.LexerContext` class, and -that instance is passed to callbacks as a third argument. This means that you -can modify the lexer state in callbacks. - -The `LexerContext` class has the following members: - -* `text` -- the input text -* `pos` -- the current starting position that is used for matching regexes -* `stack` -- a list containing the state stack -* `end` -- the maximum position to which regexes are matched, this defaults to - the length of `text` - -Additionally, the `get_tokens_unprocessed()` method can be given a -`LexerContext` instead of a string and will then process this context instead of -creating a new one for the string argument. - -Note that because you can set the current position to anything in the callback, -it won't be automatically be set by the caller after the callback is finished. 
-For example, this is how the hypothetical lexer above would be written with the -`ExtendedRegexLexer`:: - - from pygments.lexer import ExtendedRegexLexer - from pygments.token import Generic - - class ExHypotheticLexer(ExtendedRegexLexer): - - def headline_callback(lexer, match, ctx): - equal_signs = match.group(1) - text = match.group(2) - yield match.start(), Generic.Headline, equal_signs + text + equal_signs - ctx.pos = match.end() - - tokens = { - 'root': [ - (r'(=+)(.*?)(\1)', headline_callback) - ] - } - -This might sound confusing (and it can really be). But it is needed, and for an -example look at the Ruby lexer in `ruby.py`_. - -.. _ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py - - -Handling Lists of Keywords -========================== - -For a relatively short list (hundreds) you can construct an optimized regular -expression directly using ``words()`` (longer lists, see next section). This -function handles a few things for you automatically, including escaping -metacharacters and Python's first-match rather than longest-match in -alternations. Feel free to put the lists themselves in -``pygments/lexers/_$lang_builtins.py`` (see examples there), and generated by -code if possible. - -An example of using ``words()`` is something like:: - - from pygments.lexer import RegexLexer, words, Name - - class MyLexer(RegexLexer): - - tokens = { - 'root': [ - (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin), - (r'\w+', Name), - ], - } - -As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed -regex. - - -Modifying Token Streams -======================= - -Some languages ship a lot of builtin functions (for example PHP). The total -amount of those functions differs from system to system because not everybody -has every extension installed. In the case of PHP there are over 3000 builtin -functions. That's an incredibly huge amount of functions, much more than you -want to put into a regular expression. - -But because only `Name` tokens can be function names this is solvable by -overriding the ``get_tokens_unprocessed()`` method. The following lexer -subclasses the `PythonLexer` so that it highlights some additional names as -pseudo keywords:: - - from pygments.lexers.python import PythonLexer - from pygments.token import Name, Keyword - - class MyPythonLexer(PythonLexer): - EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs')) - - def get_tokens_unprocessed(self, text): - for index, token, value in PythonLexer.get_tokens_unprocessed(self, text): - if token is Name and value in self.EXTRA_KEYWORDS: - yield index, Keyword.Pseudo, value - else: - yield index, token, value - -The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions. diff --git a/doc/docs/lexers.rst b/doc/docs/lexers.rst deleted file mode 100644 index ef40f140..00000000 --- a/doc/docs/lexers.rst +++ /dev/null @@ -1,69 +0,0 @@ -.. -*- mode: rst -*- - -================ -Available lexers -================ - -This page lists all available builtin lexers and the options they take. - -Currently, **all lexers** support these options: - -`stripnl` - Strip leading and trailing newlines from the input (default: ``True``) - -`stripall` - Strip all leading and trailing whitespace from the input (default: - ``False``). - -`ensurenl` - Make sure that the input ends with a newline (default: ``True``). This - is required for some lexers that consume input linewise. - - .. 
versionadded:: 1.3 - -`tabsize` - If given and greater than 0, expand tabs in the input (default: ``0``). - -`encoding` - If given, must be an encoding name (such as ``"utf-8"``). This encoding - will be used to convert the input string to Unicode (if it is not already - a Unicode string). The default is ``"guess"``. - - If this option is set to ``"guess"``, a simple UTF-8 vs. Latin-1 - detection is used, if it is set to ``"chardet"``, the - `chardet library `_ is used to - guess the encoding of the input. - - .. versionadded:: 0.6 - - -The "Short Names" field lists the identifiers that can be used with the -`get_lexer_by_name()` function. - -These lexers are builtin and can be imported from `pygments.lexers`: - -.. pygmentsdoc:: lexers - - -Iterating over all lexers -------------------------- - -.. versionadded:: 0.6 - -To get all lexers (both the builtin and the plugin ones), you can -use the `get_all_lexers()` function from the `pygments.lexers` -module: - -.. sourcecode:: pycon - - >>> from pygments.lexers import get_all_lexers - >>> i = get_all_lexers() - >>> i.next() - ('Diff', ('diff',), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')) - >>> i.next() - ('Delphi', ('delphi', 'objectpascal', 'pas', 'pascal'), ('*.pas',), ('text/x-pascal',)) - >>> i.next() - ('XML+Ruby', ('xml+erb', 'xml+ruby'), (), ()) - -As you can see, the return value is an iterator which yields tuples -in the form ``(name, aliases, filetypes, mimetypes)``. diff --git a/doc/docs/moinmoin.rst b/doc/docs/moinmoin.rst deleted file mode 100644 index 8b2216b3..00000000 --- a/doc/docs/moinmoin.rst +++ /dev/null @@ -1,39 +0,0 @@ -.. -*- mode: rst -*- - -============================ -Using Pygments with MoinMoin -============================ - -From Pygments 0.7, the source distribution ships a `Moin`_ parser plugin that -can be used to get Pygments highlighting in Moin wiki pages. - -To use it, copy the file `external/moin-parser.py` from the Pygments -distribution to the `data/plugin/parser` subdirectory of your Moin instance. -Edit the options at the top of the file (currently ``ATTACHMENTS`` and -``INLINESTYLES``) and rename the file to the name that the parser directive -should have. For example, if you name the file ``code.py``, you can get a -highlighted Python code sample with this Wiki markup:: - - {{{ - #!code python - [...] - }}} - -where ``python`` is the Pygments name of the lexer to use. - -Additionally, if you set the ``ATTACHMENTS`` option to True, Pygments will also -be called for all attachments for whose filenames there is no other parser -registered. - -You are responsible for including CSS rules that will map the Pygments CSS -classes to colors. You can output a stylesheet file with `pygmentize`, put it -into the `htdocs` directory of your Moin instance and then include it in the -`stylesheets` configuration option in the Moin config, e.g.:: - - stylesheets = [('screen', '/htdocs/pygments.css')] - -If you do not want to do that and are willing to accept larger HTML output, you -can set the ``INLINESTYLES`` option to True. - - -.. 
_Moin: http://moinmoin.wikiwikiweb.de/ diff --git a/doc/docs/plugins.rst b/doc/docs/plugins.rst deleted file mode 100644 index a6f8d7b0..00000000 --- a/doc/docs/plugins.rst +++ /dev/null @@ -1,93 +0,0 @@ -================ -Register Plugins -================ - -If you want to extend Pygments without hacking the sources, but want to -use the lexer/formatter/style/filter lookup functions (`lexers.get_lexer_by_name` -et al.), you can use `setuptools`_ entrypoints to add new lexers, formatters -or styles as if they were in the Pygments core. - -.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools - -That means you can use your highlighter modules with the `pygmentize` script, -which relies on the mentioned functions. - - -Entrypoints -=========== - -Here is a list of setuptools entrypoints that Pygments understands: - -`pygments.lexers` - - This entrypoint is used for adding new lexers to the Pygments core. - The name of the entrypoint values doesn't really matter, Pygments extracts - required metadata from the class definition: - - .. sourcecode:: ini - - [pygments.lexers] - yourlexer = yourmodule:YourLexer - - Note that you have to define ``name``, ``aliases`` and ``filename`` - attributes so that you can use the highlighter from the command line: - - .. sourcecode:: python - - class YourLexer(...): - name = 'Name Of Your Lexer' - aliases = ['alias'] - filenames = ['*.ext'] - - -`pygments.formatters` - - You can use this entrypoint to add new formatters to Pygments. The - name of an entrypoint item is the name of the formatter. If you - prefix the name with a slash it's used as a filename pattern: - - .. sourcecode:: ini - - [pygments.formatters] - yourformatter = yourmodule:YourFormatter - /.ext = yourmodule:YourFormatter - - -`pygments.styles` - - To add a new style you can use this entrypoint. The name of the entrypoint - is the name of the style: - - .. sourcecode:: ini - - [pygments.styles] - yourstyle = yourmodule:YourStyle - - -`pygments.filters` - - Use this entrypoint to register a new filter. The name of the - entrypoint is the name of the filter: - - .. sourcecode:: ini - - [pygments.filters] - yourfilter = yourmodule:YourFilter - - -How To Use Entrypoints -====================== - -This documentation doesn't explain how to use those entrypoints because this is -covered in the `setuptools documentation`_. That page should cover everything -you need to write a plugin. - -.. _setuptools documentation: http://peak.telecommunity.com/DevCenter/setuptools - - -Extending The Core -================== - -If you have written a Pygments plugin that is open source, please inform us -about that. There is a high chance that we'll add it to the Pygments -distribution. diff --git a/doc/docs/quickstart.rst b/doc/docs/quickstart.rst deleted file mode 100644 index 3a823e7f..00000000 --- a/doc/docs/quickstart.rst +++ /dev/null @@ -1,205 +0,0 @@ -.. -*- mode: rst -*- - -=========================== -Introduction and Quickstart -=========================== - - -Welcome to Pygments! This document explains the basic concepts and terms and -gives a few examples of how to use the library. - - -Architecture -============ - -There are four types of components that work together highlighting a piece of -code: - -* A **lexer** splits the source into tokens, fragments of the source that - have a token type that determines what the text represents semantically - (e.g., keyword, string, or comment). There is a lexer for every language - or markup format that Pygments supports. 
-* The token stream can be piped through **filters**, which usually modify - the token types or text fragments, e.g. uppercasing all keywords. -* A **formatter** then takes the token stream and writes it to an output - file, in a format such as HTML, LaTeX or RTF. -* While writing the output, a **style** determines how to highlight all the - different token types. It maps them to attributes like "red and bold". - - -Example -======= - -Here is a small example for highlighting Python code: - -.. sourcecode:: python - - from pygments import highlight - from pygments.lexers import PythonLexer - from pygments.formatters import HtmlFormatter - - code = 'print "Hello World"' - print(highlight(code, PythonLexer(), HtmlFormatter())) - -which prints something like this: - -.. sourcecode:: html - -
-
-print "Hello World"
-
-
- -As you can see, Pygments uses CSS classes (by default, but you can change that) -instead of inline styles in order to avoid outputting redundant style information over -and over. A CSS stylesheet that contains all CSS classes possibly used in the output -can be produced by: - -.. sourcecode:: python - - print(HtmlFormatter().get_style_defs('.highlight')) - -The argument to :func:`get_style_defs` is used as an additional CSS selector: -the output may look like this: - -.. sourcecode:: css - - .highlight .k { color: #AA22FF; font-weight: bold } - .highlight .s { color: #BB4444 } - ... - - -Options -======= - -The :func:`highlight()` function supports a fourth argument called *outfile*, it -must be a file object if given. The formatted output will then be written to -this file instead of being returned as a string. - -Lexers and formatters both support options. They are given to them as keyword -arguments either to the class or to the lookup method: - -.. sourcecode:: python - - from pygments import highlight - from pygments.lexers import get_lexer_by_name - from pygments.formatters import HtmlFormatter - - lexer = get_lexer_by_name("python", stripall=True) - formatter = HtmlFormatter(linenos=True, cssclass="source") - result = highlight(code, lexer, formatter) - -This makes the lexer strip all leading and trailing whitespace from the input -(`stripall` option), lets the formatter output line numbers (`linenos` option), -and sets the wrapping ``
``'s class to ``source`` (instead of -``highlight``). - -Important options include: - -`encoding` : for lexers and formatters - Since Pygments uses Unicode strings internally, this determines which - encoding will be used to convert to or from byte strings. -`style` : for formatters - The name of the style to use when writing the output. - - -For an overview of builtin lexers and formatters and their options, visit the -:doc:`lexer ` and :doc:`formatters ` lists. - -For a documentation on filters, see :doc:`this page `. - - -Lexer and formatter lookup -========================== - -If you want to lookup a built-in lexer by its alias or a filename, you can use -one of the following methods: - -.. sourcecode:: pycon - - >>> from pygments.lexers import (get_lexer_by_name, - ... get_lexer_for_filename, get_lexer_for_mimetype) - - >>> get_lexer_by_name('python') - - - >>> get_lexer_for_filename('spam.rb') - - - >>> get_lexer_for_mimetype('text/x-perl') - - -All these functions accept keyword arguments; they will be passed to the lexer -as options. - -A similar API is available for formatters: use :func:`.get_formatter_by_name()` -and :func:`.get_formatter_for_filename()` from the :mod:`pygments.formatters` -module for this purpose. - - -Guessing lexers -=============== - -If you don't know the content of the file, or you want to highlight a file -whose extension is ambiguous, such as ``.html`` (which could contain plain HTML -or some template tags), use these functions: - -.. sourcecode:: pycon - - >>> from pygments.lexers import guess_lexer, guess_lexer_for_filename - - >>> guess_lexer('#!/usr/bin/python\nprint "Hello World!"') - - - >>> guess_lexer_for_filename('test.py', 'print "Hello World!"') - - -:func:`.guess_lexer()` passes the given content to the lexer classes' -:meth:`analyse_text()` method and returns the one for which it returns the -highest number. - -All lexers have two different filename pattern lists: the primary and the -secondary one. The :func:`.get_lexer_for_filename()` function only uses the -primary list, whose entries are supposed to be unique among all lexers. -:func:`.guess_lexer_for_filename()`, however, will first loop through all lexers -and look at the primary and secondary filename patterns if the filename matches. -If only one lexer matches, it is returned, else the guessing mechanism of -:func:`.guess_lexer()` is used with the matching lexers. - -As usual, keyword arguments to these functions are given to the created lexer -as options. - - -Command line usage -================== - -You can use Pygments from the command line, using the :program:`pygmentize` -script:: - - $ pygmentize test.py - -will highlight the Python file test.py using ANSI escape sequences -(a.k.a. terminal colors) and print the result to standard output. - -To output HTML, use the ``-f`` option:: - - $ pygmentize -f html -o test.html test.py - -to write an HTML-highlighted version of test.py to the file test.html. -Note that it will only be a snippet of HTML, if you want a full HTML document, -use the "full" option:: - - $ pygmentize -f html -O full -o test.html test.py - -This will produce a full HTML document with included stylesheet. - -A style can be selected with ``-O style=``. - -If you need a stylesheet for an existing HTML file using Pygments CSS classes, -it can be created with:: - - $ pygmentize -S default -f html > style.css - -where ``default`` is the style name. - -More options and tricks and be found in the :doc:`command line reference -`. 
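 The ``full`` option is also available through the API; a small sketch (file names chosen only for illustration) that writes a complete, self-contained HTML document: .. sourcecode:: python from pygments import highlight from pygments.lexers import PythonLexer from pygments.formatters import HtmlFormatter code = 'print "Hello World"' # full=True emits a whole HTML document with the stylesheet embedded with open('test.html', 'w') as f: f.write(highlight(code, PythonLexer(), HtmlFormatter(full=True, style='default'))) 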
diff --git a/doc/docs/rstdirective.rst b/doc/docs/rstdirective.rst deleted file mode 100644 index c0d503b3..00000000 --- a/doc/docs/rstdirective.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. -*- mode: rst -*- - -================================ -Using Pygments in ReST documents -================================ - -Many Python people use `ReST`_ for documentation their sourcecode, programs, -scripts et cetera. This also means that documentation often includes sourcecode -samples or snippets. - -You can easily enable Pygments support for your ReST texts using a custom -directive -- this is also how this documentation displays source code. - -From Pygments 0.9, the directive is shipped in the distribution as -`external/rst-directive.py`. You can copy and adapt this code to your liking. - -.. removed -- too confusing - *Loosely related note:* The ReST lexer now recognizes ``.. sourcecode::`` and - ``.. code::`` directives and highlights the contents in the specified language - if the `handlecodeblocks` option is true. - -.. _ReST: http://docutils.sf.net/rst.html diff --git a/doc/docs/styles.rst b/doc/docs/styles.rst deleted file mode 100644 index 570293a5..00000000 --- a/doc/docs/styles.rst +++ /dev/null @@ -1,232 +0,0 @@ -.. -*- mode: rst -*- - -====== -Styles -====== - -Pygments comes with some builtin styles that work for both the HTML and -LaTeX formatter. - -The builtin styles can be looked up with the `get_style_by_name` function: - -.. sourcecode:: pycon - - >>> from pygments.styles import get_style_by_name - >>> get_style_by_name('colorful') - - -You can pass a instance of a `Style` class to a formatter as the `style` -option in form of a string: - -.. sourcecode:: pycon - - >>> from pygments.styles import get_style_by_name - >>> from pygments.formatters import HtmlFormatter - >>> HtmlFormatter(style='colorful').style - - -Or you can also import your own style (which must be a subclass of -`pygments.style.Style`) and pass it to the formatter: - -.. sourcecode:: pycon - - >>> from yourapp.yourmodule import YourStyle - >>> from pygments.formatters import HtmlFormatter - >>> HtmlFormatter(style=YourStyle).style - - - -Creating Own Styles -=================== - -So, how to create a style? All you have to do is to subclass `Style` and -define some styles: - -.. sourcecode:: python - - from pygments.style import Style - from pygments.token import Keyword, Name, Comment, String, Error, \ - Number, Operator, Generic - - class YourStyle(Style): - default_style = "" - styles = { - Comment: 'italic #888', - Keyword: 'bold #005', - Name: '#f00', - Name.Function: '#0f0', - Name.Class: 'bold #0f0', - String: 'bg:#eee #111' - } - -That's it. There are just a few rules. When you define a style for `Name` -the style automatically also affects `Name.Function` and so on. If you -defined ``'bold'`` and you don't want boldface for a subtoken use ``'nobold'``. - -(Philosophy: the styles aren't written in CSS syntax since this way -they can be used for a variety of formatters.) - -`default_style` is the style inherited by all token types. - -To make the style usable for Pygments, you must - -* either register it as a plugin (see :doc:`the plugin docs `) -* or drop it into the `styles` subpackage of your Pygments distribution one style - class per style, where the file name is the style name and the class name is - `StylenameClass`. 
For example, if your style should be called - ``"mondrian"``, name the class `MondrianStyle`, put it into the file - ``mondrian.py`` and this file into the ``pygments.styles`` subpackage - directory. - - -Style Rules -=========== - -Here a small overview of all allowed styles: - -``bold`` - render text as bold -``nobold`` - don't render text as bold (to prevent subtokens being highlighted bold) -``italic`` - render text italic -``noitalic`` - don't render text as italic -``underline`` - render text underlined -``nounderline`` - don't render text underlined -``bg:`` - transparent background -``bg:#000000`` - background color (black) -``border:`` - no border -``border:#ffffff`` - border color (white) -``#ff0000`` - text color (red) -``noinherit`` - don't inherit styles from supertoken - -Note that there may not be a space between ``bg:`` and the color value -since the style definition string is split at whitespace. -Also, using named colors is not allowed since the supported color names -vary for different formatters. - -Furthermore, not all lexers might support every style. - - -Builtin Styles -============== - -Pygments ships some builtin styles which are maintained by the Pygments team. - -To get a list of known styles you can use this snippet: - -.. sourcecode:: pycon - - >>> from pygments.styles import STYLE_MAP - >>> STYLE_MAP.keys() - ['default', 'emacs', 'friendly', 'colorful'] - - -Getting a list of available styles -================================== - -.. versionadded:: 0.6 - -Because it could be that a plugin registered a style, there is -a way to iterate over all styles: - -.. sourcecode:: pycon - - >>> from pygments.styles import get_all_styles - >>> styles = list(get_all_styles()) - - -.. _AnsiTerminalStyle: - -Terminal Styles -=============== - -.. versionadded:: 2.2 - -Custom styles used with the 256-color terminal formatter can also map colors to -use the 8 default ANSI colors. To do so, use ``ansigreen``, ``ansibrightred`` or -any other colors defined in :attr:`pygments.style.ansicolors`. Foreground ANSI -colors will be mapped to the corresponding `escape codes 30 to 37 -`_ thus respecting any -custom color mapping and themes provided by many terminal emulators. Light -variants are treated as foreground color with and an added bold flag. -``bg:ansi`` will also be respected, except the light variant will be the -same shade as their dark variant. - -See the following example where the color of the string ``"hello world"`` is -governed by the escape sequence ``\x1b[34;01m`` (Ansi bright blue, Bold, 41 being red -background) instead of an extended foreground & background color. - -.. sourcecode:: pycon - - >>> from pygments import highlight - >>> from pygments.style import Style - >>> from pygments.token import Token - >>> from pygments.lexers import Python3Lexer - >>> from pygments.formatters import Terminal256Formatter - - >>> class MyStyle(Style): - styles = { - Token.String: 'ansibrightblue bg:ansibrightred', - } - - >>> code = 'print("Hello World")' - >>> result = highlight(code, Python3Lexer(), Terminal256Formatter(style=MyStyle)) - >>> print(result.encode()) - b'\x1b[34;41;01m"\x1b[39;49;00m\x1b[34;41;01mHello World\x1b[39;49;00m\x1b[34;41;01m"\x1b[39;49;00m' - -Colors specified using ``ansi*`` are converted to a default set of RGB colors -when used with formatters other than the terminal-256 formatter. 
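 One quick way to observe that fallback, assuming the ``MyStyle`` class from the example above, is to ask the HTML formatter for its style definitions; the ``ansi*`` names come back as plain RGB colors: .. sourcecode:: python from pygments.formatters import HtmlFormatter # MyStyle is the hypothetical style defined in the previous example; # outside the terminal-256 formatter its ansi* colors fall back to fixed RGB values print(HtmlFormatter(style=MyStyle).get_style_defs('.highlight')) 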
- -By definition of ANSI, the following colors are considered "light" colors, and -will be rendered by most terminals as bold: - -- "brightblack" (darkgrey), "brightred", "brightgreen", "brightyellow", "brightblue", - "brightmagenta", "brightcyan", "white" - -The following are considered "dark" colors and will be rendered as non-bold: - -- "black", "red", "green", "yellow", "blue", "magenta", "cyan", - "gray" - -Exact behavior might depends on the terminal emulator you are using, and its -settings. - -.. _new-ansi-color-names: - -.. versionchanged:: 2.4 - -The definition of the ANSI color names has changed. -New names are easier to understand and align to the colors used in other projects. - -===================== ==================== -New names Pygments up to 2.3 -===================== ==================== -``ansiblack`` ``#ansiblack`` -``ansired`` ``#ansidarkred`` -``ansigreen`` ``#ansidarkgreen`` -``ansiyellow`` ``#ansibrown`` -``ansiblue`` ``#ansidarkblue`` -``ansimagenta`` ``#ansipurple`` -``ansicyan`` ``#ansiteal`` -``ansigray`` ``#ansilightgray`` -``ansibrightblack`` ``#ansidarkgray`` -``ansibrightred`` ``#ansired`` -``ansibrightgreen`` ``#ansigreen`` -``ansibrightyellow`` ``#ansiyellow`` -``ansibrightblue`` ``#ansiblue`` -``ansibrightmagenta`` ``#ansifuchsia`` -``ansibrightcyan`` ``#ansiturquoise`` -``ansiwhite`` ``#ansiwhite`` -===================== ==================== - -Old ANSI color names are deprecated but will still work. diff --git a/doc/docs/tokens.rst b/doc/docs/tokens.rst deleted file mode 100644 index 801fc638..00000000 --- a/doc/docs/tokens.rst +++ /dev/null @@ -1,372 +0,0 @@ -.. -*- mode: rst -*- - -============== -Builtin Tokens -============== - -.. module:: pygments.token - -In the :mod:`pygments.token` module, there is a special object called `Token` -that is used to create token types. - -You can create a new token type by accessing an attribute of `Token`: - -.. sourcecode:: pycon - - >>> from pygments.token import Token - >>> Token.String - Token.String - >>> Token.String is Token.String - True - -Note that tokens are singletons so you can use the ``is`` operator for comparing -token types. - -As of Pygments 0.7 you can also use the ``in`` operator to perform set tests: - -.. sourcecode:: pycon - - >>> from pygments.token import Comment - >>> Comment.Single in Comment - True - >>> Comment in Comment.Multi - False - -This can be useful in :doc:`filters ` and if you write lexers on your -own without using the base lexers. - -You can also split a token type into a hierarchy, and get the parent of it: - -.. sourcecode:: pycon - - >>> String.split() - [Token, Token.Literal, Token.Literal.String] - >>> String.parent - Token.Literal - -In principle, you can create an unlimited number of token types but nobody can -guarantee that a style would define style rules for a token type. Because of -that, Pygments proposes some global token types defined in the -`pygments.token.STANDARD_TYPES` dict. - -For some tokens aliases are already defined: - -.. sourcecode:: pycon - - >>> from pygments.token import String - >>> String - Token.Literal.String - -Inside the :mod:`pygments.token` module the following aliases are defined: - -============= ============================ ==================================== -`Text` `Token.Text` for any type of text data -`Whitespace` `Token.Text.Whitespace` for specially highlighted whitespace -`Error` `Token.Error` represents lexer errors -`Other` `Token.Other` special token for data not - matched by a parser (e.g. 
HTML - markup in PHP code) -`Keyword` `Token.Keyword` any kind of keywords -`Name` `Token.Name` variable/function names -`Literal` `Token.Literal` Any literals -`String` `Token.Literal.String` string literals -`Number` `Token.Literal.Number` number literals -`Operator` `Token.Operator` operators (``+``, ``not``...) -`Punctuation` `Token.Punctuation` punctuation (``[``, ``(``...) -`Comment` `Token.Comment` any kind of comments -`Generic` `Token.Generic` generic tokens (have a look at - the explanation below) -============= ============================ ==================================== - -The `Whitespace` token type is new in Pygments 0.8. It is used only by the -`VisibleWhitespaceFilter` currently. - -Normally you just create token types using the already defined aliases. For each -of those token aliases, a number of subtypes exists (excluding the special tokens -`Token.Text`, `Token.Error` and `Token.Other`) - -The `is_token_subtype()` function in the `pygments.token` module can be used to -test if a token type is a subtype of another (such as `Name.Tag` and `Name`). -(This is the same as ``Name.Tag in Name``. The overloaded `in` operator was newly -introduced in Pygments 0.7, the function still exists for backwards -compatibility.) - -With Pygments 0.7, it's also possible to convert strings to token types (for example -if you want to supply a token from the command line): - -.. sourcecode:: pycon - - >>> from pygments.token import String, string_to_tokentype - >>> string_to_tokentype("String") - Token.Literal.String - >>> string_to_tokentype("Token.Literal.String") - Token.Literal.String - >>> string_to_tokentype(String) - Token.Literal.String - - -Keyword Tokens -============== - -`Keyword` - For any kind of keyword (especially if it doesn't match any of the - subtypes of course). - -`Keyword.Constant` - For keywords that are constants (e.g. ``None`` in future Python versions). - -`Keyword.Declaration` - For keywords used for variable declaration (e.g. ``var`` in some programming - languages like JavaScript). - -`Keyword.Namespace` - For keywords used for namespace declarations (e.g. ``import`` in Python and - Java and ``package`` in Java). - -`Keyword.Pseudo` - For keywords that aren't really keywords (e.g. ``None`` in old Python - versions). - -`Keyword.Reserved` - For reserved keywords. - -`Keyword.Type` - For builtin types that can't be used as identifiers (e.g. ``int``, - ``char`` etc. in C). - - -Name Tokens -=========== - -`Name` - For any name (variable names, function names, classes). - -`Name.Attribute` - For all attributes (e.g. in HTML tags). - -`Name.Builtin` - Builtin names; names that are available in the global namespace. - -`Name.Builtin.Pseudo` - Builtin names that are implicit (e.g. ``self`` in Ruby, ``this`` in Java). - -`Name.Class` - Class names. Because no lexer can know if a name is a class or a function - or something else this token is meant for class declarations. - -`Name.Constant` - Token type for constants. In some languages you can recognise a token by the - way it's defined (the value after a ``const`` keyword for example). In - other languages constants are uppercase by definition (Ruby). - -`Name.Decorator` - Token type for decorators. Decorators are syntactic elements in the Python - language. Similar syntax elements exist in C# and Java. - -`Name.Entity` - Token type for special entities. (e.g. `` `` in HTML). - -`Name.Exception` - Token type for exception names (e.g. ``RuntimeError`` in Python). 
Some languages - define exceptions in the function signature (Java). You can highlight - the name of that exception using this token then. - -`Name.Function` - Token type for function names. - -`Name.Function.Magic` - same as `Name.Function` but for special function names that have an implicit use - in a language (e.g. ``__init__`` method in Python). - -`Name.Label` - Token type for label names (e.g. in languages that support ``goto``). - -`Name.Namespace` - Token type for namespaces. (e.g. import paths in Java/Python), names following - the ``module``/``namespace`` keyword in other languages. - -`Name.Other` - Other names. Normally unused. - -`Name.Tag` - Tag names (in HTML/XML markup or configuration files). - -`Name.Variable` - Token type for variables. Some languages have prefixes for variable names - (PHP, Ruby, Perl). You can highlight them using this token. - -`Name.Variable.Class` - same as `Name.Variable` but for class variables (also static variables). - -`Name.Variable.Global` - same as `Name.Variable` but for global variables (used in Ruby, for - example). - -`Name.Variable.Instance` - same as `Name.Variable` but for instance variables. - -`Name.Variable.Magic` - same as `Name.Variable` but for special variable names that have an implicit use - in a language (e.g. ``__doc__`` in Python). - - -Literals -======== - -`Literal` - For any literal (if not further defined). - -`Literal.Date` - for date literals (e.g. ``42d`` in Boo). - - -`String` - For any string literal. - -`String.Affix` - Token type for affixes that further specify the type of the string they're - attached to (e.g. the prefixes ``r`` and ``u8`` in ``r"foo"`` and ``u8"foo"``). - -`String.Backtick` - Token type for strings enclosed in backticks. - -`String.Char` - Token type for single characters (e.g. Java, C). - -`String.Delimiter` - Token type for delimiting identifiers in "heredoc", raw and other similar - strings (e.g. the word ``END`` in Perl code ``print <<'END';``). - -`String.Doc` - Token type for documentation strings (for example Python). - -`String.Double` - Double quoted strings. - -`String.Escape` - Token type for escape sequences in strings. - -`String.Heredoc` - Token type for "heredoc" strings (e.g. in Ruby or Perl). - -`String.Interpol` - Token type for interpolated parts in strings (e.g. ``#{foo}`` in Ruby). - -`String.Other` - Token type for any other strings (for example ``%q{foo}`` string constructs - in Ruby). - -`String.Regex` - Token type for regular expression literals (e.g. ``/foo/`` in JavaScript). - -`String.Single` - Token type for single quoted strings. - -`String.Symbol` - Token type for symbols (e.g. ``:foo`` in LISP or Ruby). - - -`Number` - Token type for any number literal. - -`Number.Bin` - Token type for binary literals (e.g. ``0b101010``). - -`Number.Float` - Token type for float literals (e.g. ``42.0``). - -`Number.Hex` - Token type for hexadecimal number literals (e.g. ``0xdeadbeef``). - -`Number.Integer` - Token type for integer literals (e.g. ``42``). - -`Number.Integer.Long` - Token type for long integer literals (e.g. ``42L`` in Python). - -`Number.Oct` - Token type for octal literals. - - -Operators -========= - -`Operator` - For any punctuation operator (e.g. ``+``, ``-``). - -`Operator.Word` - For any operator that is a word (e.g. ``not``). - - -Punctuation -=========== - -.. versionadded:: 0.7 - -`Punctuation` - For any punctuation which is not an operator (e.g. ``[``, ``(``...) - - -Comments -======== - -`Comment` - Token type for any comment. 
- -`Comment.Hashbang` - Token type for hashbang comments (i.e. first lines of files that start with - ``#!``). - -`Comment.Multiline` - Token type for multiline comments. - -`Comment.Preproc` - Token type for preprocessor comments (also ```. - -.. versionadded:: 0.7 - The formatters now also accept an `outencoding` option which will override - the `encoding` option if given. This makes it possible to use a single - options dict with lexers and formatters, and still have different input and - output encodings. - -.. _chardet: https://chardet.github.io/ diff --git a/doc/download.rst b/doc/download.rst deleted file mode 100644 index cf32f481..00000000 --- a/doc/download.rst +++ /dev/null @@ -1,41 +0,0 @@ -Download and installation -========================= - -The current release is version |version|. - -Packaged versions ------------------ - -You can download it `from the Python Package Index -`_. For installation of packages from -PyPI, we recommend `Pip `_, which works on all -major platforms. - -Under Linux, most distributions include a package for Pygments, usually called -``pygments`` or ``python-pygments``. You can install it with the package -manager as usual. - -Development sources -------------------- - -We're using the `Mercurial `_ version control -system. You can get the development source using this command:: - - hg clone http://bitbucket.org/birkenfeld/pygments-main pygments - -Development takes place at `Bitbucket -`_, you can browse the source -online `here `_. - -The latest changes in the development source code are listed in the `changelog -`_. - -.. Documentation - ------------- - -.. XXX todo - - You can download the documentation either as - a bunch of rst files from the Mercurial repository, see above, or - as a tar.gz containing rendered HTML files:

-

pygmentsdocs.tar.gz
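As a rough tie-in between the installation notes above and the token types documented earlier, the sketch below defines a tiny custom style keyed on a few of those token types and runs it through the library API. It is only an illustration: the ``MiniStyle`` name, the color values and the sample snippet are invented, while ``highlight``, ``PythonLexer``, ``HtmlFormatter`` and ``pygments.style.Style`` are the ordinary Pygments entry points.

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter
    from pygments.style import Style
    from pygments.token import Comment, Keyword, Name, Number, String

    class MiniStyle(Style):
        # Illustrative only: map a few token types to colors.
        default_style = ""
        styles = {
            Comment:       "italic #888888",
            Keyword:       "bold #005fd7",
            Name.Function: "#af00d7",
            String:        "#008700",
            Number:        "#d75f00",
        }

    code = 'def greet(name):\n    return "Hello, " + name\n'

    # Lex the snippet as Python and render it as an HTML block using MiniStyle.
    print(highlight(code, PythonLexer(), HtmlFormatter(style=MiniStyle)))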

diff --git a/doc/faq.rst b/doc/faq.rst deleted file mode 100644 index 172929e0..00000000 --- a/doc/faq.rst +++ /dev/null @@ -1,139 +0,0 @@ -:orphan: - -Pygments FAQ -============= - -What is Pygments? ------------------ - -Pygments is a syntax highlighting engine written in Python. That means, it will -take source code (or other markup) in a supported language and output a -processed version (in different formats) containing syntax highlighting markup. - -Its features include: - -* a wide range of common :doc:`languages and markup formats ` is supported -* new languages and formats are added easily -* a number of output formats is available, including: - - - HTML - - ANSI sequences (console output) - - LaTeX - - RTF - -* it is usable as a command-line tool and as a library -* parsing and formatting is fast - -Pygments is licensed under the BSD license. - -Where does the name Pygments come from? ---------------------------------------- - -*Py* of course stands for Python, while *pigments* are used for coloring paint, -and in this case, source code! - -What are the system requirements? ---------------------------------- - -Pygments only needs a standard Python install, version 2.7 or higher or version -3.5 or higher for Python 3. No additional libraries are needed. - -How can I use Pygments? ------------------------ - -Pygments is usable as a command-line tool as well as a library. - -From the command-line, usage looks like this (assuming the pygmentize script is -properly installed):: - - pygmentize -f html /path/to/file.py - -This will print a HTML-highlighted version of /path/to/file.py to standard output. - -For a complete help, please run ``pygmentize -h``. - -Usage as a library is thoroughly demonstrated in the Documentation section. - -How do I make a new style? --------------------------- - -Please see the :doc:`documentation on styles `. - -How can I report a bug or suggest a feature? --------------------------------------------- - -Please report bugs and feature wishes in the tracker at Bitbucket. - -You can also e-mail the author or use IRC, see the contact details. - -I want this support for this language! --------------------------------------- - -Instead of waiting for others to include language support, why not write it -yourself? All you have to know is :doc:`outlined in the docs -`. - -Can I use Pygments for programming language processing? -------------------------------------------------------- - -The Pygments lexing machinery is quite powerful can be used to build lexers for -basically all languages. However, parsing them is not possible, though some -lexers go some steps in this direction in order to e.g. highlight function names -differently. - -Also, error reporting is not the scope of Pygments. It focuses on correctly -highlighting syntactically valid documents, not finding and compensating errors. - -Who uses Pygments? ------------------- - -This is an (incomplete) list of projects and sites known to use the Pygments highlighter. 
- -* `Wikipedia `_ -* `BitBucket `_, a Mercurial and Git hosting site -* `The Sphinx documentation builder `_, for embedded source examples -* `rst2pdf `_, a reStructuredText to PDF converter -* `Codecov `_, a code coverage CI service -* `Trac `_, the universal project management tool -* `AsciiDoc `_, a text-based documentation generator -* `ActiveState Code `_, the Python Cookbook successor -* `ViewVC `_, a web-based version control repository browser -* `BzrFruit `_, a Bazaar branch viewer -* `QBzr `_, a cross-platform Qt-based GUI front end for Bazaar -* `Review Board `_, a collaborative code reviewing tool -* `Diamanda `_, a Django powered wiki system with support for Pygments -* `Progopedia `_ (`English `_), - an encyclopedia of programming languages -* `Bruce `_, a reStructuredText presentation tool -* `PIDA `_, a universal IDE written in Python -* `BPython `_, a curses-based intelligent Python shell -* `PuDB `_, a console Python debugger -* `XWiki `_, a wiki-based development framework in Java, using Jython -* `roux `_, a script for running R scripts - and creating beautiful output including graphs -* `hurl `_, a web service for making HTTP requests -* `wxHTMLPygmentizer `_ is - a GUI utility, used to make code-colorization easier -* `Postmarkup `_, a BBCode to XHTML generator -* `WpPygments `_, and `WPygments - `_, highlighter plugins for WordPress -* `Siafoo `_, a tool for sharing and storing useful code and programming experience -* `D source `_, a community for the D programming language -* `dpaste.com `_, another Django pastebin -* `Django snippets `_, a pastebin for Django code -* `Fayaa `_, a Chinese pastebin -* `Incollo.com `_, a free collaborative debugging tool -* `PasteBox `_, a pastebin focused on privacy -* `hilite.me `_, a site to highlight code snippets -* `patx.me `_, a pastebin -* `Fluidic `_, an experiment in - integrating shells with a GUI -* `pygments.rb `_, a pygments wrapper for Ruby -* `Clygments `_, a pygments wrapper for - Clojure -* `PHPygments `_, a pygments wrapper for PHP - - -If you have a project or web site using Pygments, drop me a line, and I'll add a -link here. - diff --git a/doc/index.rst b/doc/index.rst deleted file mode 100644 index 26114045..00000000 --- a/doc/index.rst +++ /dev/null @@ -1,54 +0,0 @@ -Welcome! -======== - -This is the home of Pygments. It is a generic syntax highlighter suitable for -use in code hosting, forums, wikis or other applications that need to prettify -source code. Highlights are: - -* a wide range of over 300 languages and other text formats is supported -* special attention is paid to details that increase highlighting quality -* support for new languages and formats are added easily; most languages use a - simple regex-based lexing mechanism -* a number of output formats is available, among them HTML, RTF, LaTeX and ANSI - sequences -* it is usable as a command-line tool and as a library -* ... and it highlights even Perl 6! - -Read more in the :doc:`FAQ list ` or the :doc:`documentation `, -or `download the latest release `_. - -.. _contribute: - -Contribute ----------- - -Like every open-source project, we are always looking for volunteers to help us -with programming. Python knowledge is required, but don't fear: Python is a very -clear and easy to learn language. - -Development takes place on `Bitbucket -`_, where the Mercurial -repository, tickets and pull requests can be viewed. - -Our primary communication instrument is the IRC channel **#pocoo** on the -Freenode network. 
To join it, let your IRC client connect to -``irc.freenode.net`` and do ``/join #pocoo``. - -If you found a bug, just open a ticket in the Bitbucket tracker. Be sure to log -in to be notified when the issue is fixed -- development is not fast-paced as -the library is quite stable. You can also send an e-mail to the developers, see -below. - -The authors ------------ - -Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*. - -Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of -the `Pocoo `_ team and **Tim Hatch**. - -.. toctree:: - :maxdepth: 1 - :hidden: - - docs/index diff --git a/doc/languages.rst b/doc/languages.rst deleted file mode 100644 index b06ccc55..00000000 --- a/doc/languages.rst +++ /dev/null @@ -1,170 +0,0 @@ -:orphan: - -Supported languages -=================== - -Pygments supports an ever-growing range of languages. Watch this space... - -Programming languages ---------------------- - -* ActionScript -* Ada -* ANTLR -* AppleScript -* Assembly (various) -* Asymptote -* `Augeas `_ -* Awk -* BBC Basic -* Befunge -* `Boa `_ -* Boo -* BrainFuck -* C, C++ -* C# -* `Charm++ CI `_ -* Clojure -* CoffeeScript -* ColdFusion -* Common Lisp -* Coq -* Cryptol (incl. Literate Cryptol) -* `Crystal `_ -* `Cython `_ -* `D `_ -* Dart -* DCPU-16 -* Delphi -* Dylan -* `Elm `_ -* Erlang -* `Ezhil `_ Ezhil - A Tamil programming language -* Factor -* Fancy -* `Fennel `_ -* `FloScript `_ -* Fortran -* `FreeFEM++ `_ -* F# -* GAP -* Gherkin (Cucumber) -* GL shaders -* Groovy -* `Haskell `_ (incl. Literate Haskell) -* HLSL -* `HSpec `_ -* IDL -* Io -* Java -* JavaScript -* Lasso -* LLVM -* Logtalk -* `Lua `_ -* Matlab -* MiniD -* Modelica -* Modula-2 -* MuPad -* Nemerle -* Nimrod -* Objective-C -* Objective-J -* Octave -* OCaml -* PHP -* `Perl 5 `_ and `Perl 6 `_ -* `Pony `_ -* PovRay -* PostScript -* PowerShell -* Prolog -* `Python `_ 2.x and 3.x (incl. console sessions and tracebacks) -* `REBOL `_ -* `Red `_ -* Redcode -* `Ruby `_ (incl. irb sessions) -* Rust -* S, S-Plus, R -* Scala -* Scheme -* Scilab -* `SGF `_ -* `Slash `_ -* `Slurm `_ -* Smalltalk -* SNOBOL -* Tcl -* `Tera Term language `_ -* `TOML `_ -* Vala -* Verilog -* VHDL -* Visual Basic.NET -* Visual FoxPro -* XQuery -* Zephir - -Template languages ------------------- - -* Cheetah templates -* `Django `_ / `Jinja - `_ templates -* ERB (Ruby templating) -* `Genshi `_ (the Trac template language) -* JSP (Java Server Pages) -* `Myghty `_ (the HTML::Mason based framework) -* `Mako `_ (the Myghty successor) -* `Smarty `_ templates (PHP templating) -* Tea - -Other markup ------------- - -* Apache config files -* Bash shell scripts -* BBCode -* CMake -* CSS -* Debian control files -* Diff files -* DTD -* Gettext catalogs -* Gnuplot script -* Groff markup -* HTML -* HTTP sessions -* INI-style config files -* IRC logs (irssi style) -* Lighttpd config files -* Makefiles -* MoinMoin/Trac Wiki markup -* MySQL -* Nginx config files -* POV-Ray scenes -* Ragel -* Redcode -* ReST -* Robot Framework -* RPM spec files -* SQL, also MySQL, SQLite -* Squid configuration -* TeX -* tcsh -* Vim Script -* Windows batch files -* XML -* XSLT -* YAML - -... that's all? ---------------- - -Well, why not write your own? Contributing to Pygments is easy and fun. Take a look at the -:doc:`docs on lexer development ` and -:ref:`contact details `. - -Note: the languages listed here are supported in the development version. The -latest release may lack a few of them. 
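To give a feel for what writing your own lexer involves, here is a minimal, hypothetical sketch built on the same ``RegexLexer`` machinery most Pygments lexers use; the "MiniConf" language, its file extension and its token rules are made up for the example.

.. sourcecode:: python

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Comment, Name, Number, Operator, String, Text

    class MiniConfLexer(RegexLexer):
        # Hypothetical key/value config format, for illustration only.
        name = 'MiniConf'
        aliases = ['miniconf']
        filenames = ['*.mconf']

        tokens = {
            'root': [
                (r'\s+', Text),
                (r'#.*?$', Comment.Single),
                (r'(\w+)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
                (r'"[^"]*"', String.Double),
                (r'\d+', Number.Integer),
            ],
        }

    if __name__ == '__main__':
        from pygments import highlight
        from pygments.formatters import TerminalFormatter
        print(highlight('timeout = 30  # seconds\nname = "demo"\n',
                        MiniConfLexer(), TerminalFormatter()))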
diff --git a/doc/make.bat b/doc/make.bat deleted file mode 100644 index 8803c985..00000000 --- a/doc/make.bat +++ /dev/null @@ -1,190 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Pygments.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Pygments.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. 
- goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -:end diff --git a/doc/pygmentize.1 b/doc/pygmentize.1 deleted file mode 100644 index 71bb6f9c..00000000 --- a/doc/pygmentize.1 +++ /dev/null @@ -1,94 +0,0 @@ -.TH PYGMENTIZE 1 "February 15, 2007" - -.SH NAME -pygmentize \- highlights the input file - -.SH SYNOPSIS -.B \fBpygmentize\fP -.RI [-l\ \fI\fP]\ [-F\ \fI\fP[:\fI\fP]]\ [-f\ \fI\fP] -.RI [-O\ \fI\fP]\ [-P\ \fI\fP]\ [-o\ \fI\fP]\ [\fI\fP] -.br -.B \fBpygmentize\fP -.RI -S\ \fI - - -

%(title)s

- -''' - -DOC_HEADER_EXTERNALCSS = '''\ - - - - - %(title)s - - - - -

%(title)s

- -''' - -DOC_FOOTER = '''\ - - -''' - - -class HtmlFormatter(Formatter): - r"""
-    Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
-    in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
-    option.
-
-    If the `linenos` option is set to ``"table"``, the ``<pre>`` is
-    additionally wrapped inside a ``<table>`` which has one row and two
-    cells: one containing the line numbers and one containing the code.
-    Example:
-
-    .. sourcecode:: html
-
-        <div class="highlight">
-        <table><tr>
-          <td class="linenos">
-            <pre>1
-            2</pre>
-          </td>
-          <td class="code">
-            <pre>def foo(bar):
-              pass
-            </pre>
-          </td>
-        </tr></table></div>
-
-    (whitespace added to improve clarity).
-
-    Wrapping can be disabled using the `nowrap` option.
-
-    A list of lines can be specified using the `hl_lines` option to make these
-    lines highlighted (as of Pygments 0.11).
-
-    With the `full` option, a complete HTML 4 document is output, including
-    the style definitions inside a ``<style>`` tag, or in a separate file if
-    the `cssfile` option is given.
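As a hedged usage sketch of the options this docstring describes (the input snippet and option values are arbitrary; the formatter options themselves are the documented ones):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'def foo(bar):\n    pass\n'

    # linenos="table" produces the two-cell table layout shown above,
    # hl_lines marks line 2 as highlighted, and full=True wraps everything
    # in a complete HTML document with the CSS emitted in a <style> tag.
    formatter = HtmlFormatter(linenos='table', hl_lines=[2], full=True,
                              cssclass='highlight', title='example')
    print(highlight(code, PythonLexer(), formatter))

    # Without full=True, the stylesheet can be generated separately:
    print(HtmlFormatter(cssclass='highlight').get_style_defs('.highlight'))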

Error

- - - -
Path: #{path}
-
#{CGI.escapeHTML(error.to_s)}
-
- Reload this page. - Go to the referer or the home page. -
-
- - In file '#{error.hot_file}' #{error.hot_file =~ /\.xhtml$/ ? '(line numbering is aproximate due to template transformation)' : nil}: -

- -
#{line}
- -
#{line}
- -
-

Stack Trace

- - - -

Request

- - -

Response

- - -

Session

- - -

- Powered by Nitro version #{Nitro::Version} - - - - - -

Home > System > #{"%plural%".humanize} > Edit #{"%name%".humanize}

- - Show editable - #{form_for @obj, :action => "#{base}/save", :cancel => "#{base}/list", :all => true} - - Show all - #{form_for @obj, :action => "#{base}/save", :cancel => "#{base}/list"} - -
-#{form_for(@%name%)} - - -

#{"%plural%".humanize}

-

New #{"%name%".humanize}

-
- Search #{"%plural%".humanize}:   -
- - - - - - - - - - - -
#{obj.to_s}#{obj.update_time.stamp(:db)}editdel
-
- - -

Home > System > #{"%plural%".humanize}

- New #{"%name%".humanize} -

-

- Search #{"%plural%".humanize}:   -
-

- - - - - - - - - - - -
#(obj.to_s)#{obj.update_time.stamp(:db)}editdel
-
- #{@pager.navigation} -
-
- - -

Home > System > #{"%plural%".humanize} > New #{"%name%".humanize}

- - Show editable - #{form_for @obj, :action => "#{base}/save", :cancel => "#{base}/list", :all => true, :enctype => "multipart/form-data"} - - Show all - #{form_for @obj, :action => "#{base}/save", :cancel => "#{base}/list", :enctype => "multipart/form-data"} - -
- - -

Home > System > #{"%plural%".humanize} > Search for '#@query'

-

-

- Search #{"%plural%".humanize}:   -
-

- -

Search method is not implemented for this object

- - - - - - - - - - - - -
#(obj.to_s)#{obj.update_time.stamp(:db)}editdel
-
- #{@pager.navigation} -
- -
- - -

View %name%

-

List of %plural%

- - #{@obj.to_yaml} - -
-Access denied - - -

Home > System

- -

Og managed classes

- - - - - - - - - - - - - - - - - -
ClassCountCleanupProperties
#{c.name}#{c.count}deletedestroy#{c.properties.values.join(', ')}
- -

System configuration

- - - - - - - - - - - - - - - - -
NameValueTypeDescription
#{s.owner}.#{s.name}#{s.value.inspect}#{s.type}#{s.options[:doc]}
-
- - - - - Test - - - - - - - -hello -Hello #{username} - -how do you feel? - -Here is your Token: #{token} - -
- -

Questions with Tags: #{@tags.join(" ")}

- - 0 ?> - - Too many results for that Tag, please reduce the number by using one of the following Tags: - #{cloud_of(@qtags)} - -
- -

#{q.question}

-

- - #{excerpt} -

-

#{q.answers.size.to_i} answers

- -
-
- #{@qpager.navigation} -
- -
-

no question with this/these tag(s) found

-

Ask a question here.

-
- - - 0 ?> -

Tips with Tags: #{@tags.join(" ")}

- - Too many results for that Tag, please reduce the number by using one of the following Tags: - #{cloud_of(@ttags)} - -
- -

#{t.title}

-

- - #{excerpt} -

- -
-
- #{@tpager.navigation} -
- - - 0 ?> -

Tutorials with Tags: #{@tags.join(" ")}

- - Too many results for that Tag, please reduce the number by using one of the following Tags: - #{cloud_of(@tuttags)} - -
- -

#{t.title}

-

- - #{excerpt} -

- -
-
- #{@tpager.navigation} -
- - - - -
- - - #{t.name} - -
- -
- - -
- - diff --git a/tests/examplefiles/example.xtend b/tests/examplefiles/example.xtend deleted file mode 100644 index f6a51f7a..00000000 --- a/tests/examplefiles/example.xtend +++ /dev/null @@ -1,34 +0,0 @@ -package beer - -import static extension beer.BottleSupport.* -import org.junit.Test - -class BottleSong { - - @Test - def void singIt() { - println(singTheSong(99)) - } - - def singTheSong(int all) ''' - FOR i : all .. 1 - i.Bottles of beer on the wall, i.bottles of beer. - Take one down and pass it around, (i - 1).bottles of beer on the wall. - - ENDFOR - No more bottles of beer on the wall, no more bottles of beer. - Go to the store and buy some more, all.bottles of beer on the wall. - ''' - - def private java.lang.String bottles(int i) { - switch i { - case 0 : 'no more bottles' - case 1 : 'one bottle' - default : '''i bottles''' - }.toString - } - - def String Bottles(int i) { - bottles(i).toFirstUpper - } -} \ No newline at end of file diff --git a/tests/examplefiles/example.xtm b/tests/examplefiles/example.xtm deleted file mode 100644 index 927117da..00000000 --- a/tests/examplefiles/example.xtm +++ /dev/null @@ -1,1101 +0,0 @@ -;;; example.xtm -- Extempore code examples - -;; Author: Ben Swift, Andrew Sorensen -;; Keywords: extempore - -;;; Commentary: - - - -;;; Code: - -;; bit twiddling - -(xtmtest '(bind-func test_bit_twiddle_1 - (lambda () - (bitwise-and 65535 255 15 1))) - - (test_bit_twiddle_1) 1) - -(xtmtest '(bind-func test_bit_twiddle_2 - (lambda () - (bitwise-not -1))) - - (test_bit_twiddle_2) 0) - -(xtmtest '(bind-func test_bit_twiddle_3 - (lambda () - (bitwise-not 0))) - - (test_bit_twiddle_3) -1) - -(xtmtest '(bind-func test_bit_twiddle_4 - (lambda () - (bitwise-shift-right 65535 8) - (bitwise-shift-right 65535 4 4))) - - (test_bit_twiddle_4) 255) - -(xtmtest '(bind-func test_bit_twiddle_5 - (lambda () - (bitwise-shift-left (bitwise-shift-right 65535 8) 4 4))) - - (test_bit_twiddle_5) 65280) - -(xtmtest '(bind-func test_bit_twiddle_6 - (lambda () - (bitwise-and (bitwise-or (bitwise-eor 21844 65534) (bitwise-eor 43690 65534)) 1))) - - (test_bit_twiddle_6) 0) - -;; integer literals default to 64 bit integers -(xtmtest '(bind-func int-literal-test - (lambda (a) - (* a 5))) - - (int-literal-test 6) 30) - -;; float literals default to doubles -(xtmtest '(bind-func float-literal-test - (lambda (a) - (* a 5.0))) - - (float-literal-test 6.0) 30.0) - -;; you are free to recompile an existing closure -(xtmtest '(bind-func int-literal-test - (lambda (a) - (/ a 5))) - - (int-literal-test 30)) - -(xtmtest '(bind-func closure-test1 - (let ((power 0)) - (lambda (x) - (set! power (+ power 1)) ;; set! for closure mutation as per scheme - (* x power)))) - - (closure-test1 2)) - -(xtmtest '(bind-func closure-returns-closure-test - (lambda () - (lambda (x) - (* x 3)))) - - (closure-returns-closure-test)) - -(xtmtest '(bind-func incrementer-test1 - (lambda (i:i64) - (lambda (incr) - (set! i (+ i incr)) - i))) - - (incrementer-test1 0)) - -(define myf (incrementer-test1 0)) - -;; so we need to type f properly -(xtmtest '(bind-func incrementer-test2 - (lambda (f:[i64,i64]* x) - (f x))) - (incrementer-test2 myf 1) 1) - -;; and we can call my-in-maker-wrapper -;; to appy myf -(xtmtest-result (incrementer-test2 myf 1) 2) -(xtmtest-result (incrementer-test2 myf 1) 3) -(xtmtest-result (incrementer-test2 myf 1) 4) - -;; of course the wrapper is only required if you -;; need interaction with the scheme world. 
-;; otherwise you just call my-inc-maker directly - -;; this avoids the wrapper completely -(xtmtest '(bind-func incrementer-test3 - (let ((f (incrementer-test1 0))) - (lambda () - (f 1)))) - - (incrementer-test3) 1) - -(xtmtest-result (incrementer-test3) 2) -(xtmtest-result (incrementer-test3) 3) - -;; hopefully you're getting the idea. -;; note that once we've compiled something -;; we can then use it any of our new -;; function definitions. - -;; do a little 16bit test -(xtmtest '(bind-func bitsize-sixteen - (lambda (a:i16) - (dtoi16 (* (i16tod a) 5.0)))) - - (bitsize-sixteen 5) 25) - -;; while loop test - -(xtmtest '(bind-func test_while_loop_1 - (lambda () - (let ((count 0)) - (while (< count 5) - (printf "count = %lld\n" count) - (set! count (+ count 1))) - count))) - - (test_while_loop_1) 5) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; Closures can be recursive -;; - -(xtmtest '(bind-func recursive-closure-test - (lambda (a) - (if (< a 1) - (printf "done\n") - (begin (printf "a: %lld\n" a) - (recursive-closure-test (- a 1)))))) - - (recursive-closure-test 3)) - -;; check TAIL OPTIMIZATION -;; if there is no tail call optimiation -;; in place then this should blow the -;; stack and crash the test - -;; CANNOT RUN THIS TEST ON WINDOWS (i.e. no salloc)! -(if (not (equal? (sys:platform) "Windows")) - (xtmtest '(bind-func tail_opt_test - (lambda (n:i64) - (let ((a:float* (salloc 8000))) - (if (= n 0) - (printf "tail opt test passed!\n") - (tail_opt_test (- n 1)))))) - - (tail_opt_test 200))) - -(println 'A 'segfault 'here 'incidates 'that 'tail-call-optimizations 'are 'not 'working!) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; some anon lambda tests -;; - -(xtmtest '(bind-func infer_lambdas_test - (lambda () - (let ((a 5) - (b (lambda (x) (* x x))) - (c (lambda (y) (* y y)))) - (c (b a))))) - - (infer_lambdas_test)) - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; a simple tuple example -;; -;; tuple types are represented as * -;; - -;; make and return a simple tuple -(xtmtest '(bind-func tuple-test1 - (lambda () - (let ((t:* (alloc))) - t))) - - (tuple-test1)) - -;; logview shows [*]* -;; i.e. a closure that takes no arguments -;; and returns the tuple * - - -;; here's another tuple example -;; note that my-test-7's return type is inferred -;; by the tuple-reference index -;; (i.e. i64 being tuple index 0) -(xtmtest '(bind-func tuple-test2 - (lambda () - (let ((a:* (alloc)) ; returns pointer to type - (b 37) - (c 6.4)) - (tuple-set! a 0 b) ;; set i64 to 64 - (tset! a 1 c) ;; set double to 6.4 - tset! is an alias for tuple-set! - (printf "tuple:1 %lld::%f\n" (tuple-ref a 0) (tref a 1)) - ;; we can fill a tuple in a single call by using tfill! - (tfill! a 77 77.7) - (printf "tuple:2 %lld::%f\n" (tuple-ref a 0) (tuple-ref a 1)) - (tuple-ref a 0)))) - - (tuple-test2) 77) - -;; return first element which is i64 -;; should be 64 as we return the -;; first element of the tuple -;; (println (my-test-7)) ; 77 - - -;; tbind binds variables to values -;; based on tuple structure -;; _ (underscore) means don't attempt -;; to match against this position in -;; the tuple (i.e. skip) -(xtmtest '(bind-func tuple-bind-test - (lambda () - (let ((t1:*,double>* (alloc)) - (t2:* (alloc)) - (a 0) (b:float 0.0) (c 0.0)) - (tfill! t2 3 3.3) - (tfill! 
t1 1 2.0 t2 4.0) - (tbind t1 a b _ c) - c))) - - (tuple-bind-test) 4.0) - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; some array code with *casting* -;; this function returns void -(xtmtest '(bind-func array-test1 - (lambda () - (let ((v1:|5,float|* (alloc)) - (v2:|5,float|* (alloc)) - (i 0) - (k 0)) - (dotimes (i 5) - ;; random returns double so "truncate" to float - ;; which is what v expects - (array-set! v1 i (dtof (random)))) - ;; we can use the afill! function to fill an array - (afill! v2 1.1 2.2 3.3 4.4 5.5) - (dotimes (k 5) - ;; unfortunately printf doesn't like floats - ;; so back to double for us :( - (printf "val: %lld::%f::%f\n" k - (ftod (array-ref v1 k)) - (ftod (aref v2 k))))))) - - (array-test1)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; some crazy array code with -;; closures and arrays -;; try to figure out what this all does -;; -;; this example uses the array type -;; the pretty print for this type is -;; |num,type| num elements of type -;; |5,i64| is an array of 5 x i64 -;; -;; An array is not a pointer type -;; i.e. |5,i64| cannot be bitcast to i64* -;; -;; However an array can be a pointer -;; i.e. |5,i64|* can be bitcast to i64* -;; i.e. |5,i64|** to i64** etc.. -;; -;; make-array returns a pointer to an array -;; i.e. (make-array 5 i64) returns type |5,i64|* -;; -;; aref (array-ref) and aset! (array-set!) -;; can operate with either pointers to arrays or -;; standard pointers. -;; -;; in other words aref and aset! are happy -;; to work with either i64* or |5,i64|* - -(bind-func array-test2 - (lambda (v:|5,i64|*) - (let ((f (lambda (x) - (* (array-ref v 2) x)))) - f))) - -(bind-func array-test3 - (lambda (v:|5,[i64,i64]*|*) - (let ((ff (aref v 0))) ; aref alias for array-ref - (ff 5)))) - -(xtmtest '(bind-func array-test4 - (lambda () - (let ((v:|5,[i64,i64]*|* (alloc)) ;; make an array of closures! - (vv:|5,i64|* (alloc))) - (array-set! vv 2 3) - (aset! v 0 (array-test2 vv)) ;; aset! alias for array-set! - (array-test3 v)))) - - ;; try to guess the answer before you call this!! - (array-test4)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; some conditionals - -(xtmtest '(bind-func cond-test1 - (lambda (x:i64 y) - (if (> x y) - x - y))) - - (cond-test1 12 13)) - -;; returns boolean true -(xtmtest '(bind-func cond-test2 - (lambda (x:i64) - (cond ((= x 1) (printf "A\n")) - ((= x 2) (printf "B\n")) - ((= x 3) (printf "C\n")) - ((= x 4) (printf "D\n")) - (else (printf "E\n"))) - #t)) - - (cond-test2 1)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; making a linear envelop generator -;; for signal processing and alike - -(bind-func envelope-segments - (lambda (points:double* num-of-points:i64) - (let ((lines:[double,double]** (zone-alloc num-of-points)) - (k 0)) - (dotimes (k num-of-points) - (let* ((idx (* k 2)) - (x1 (pointer-ref points (+ idx 0))) - (y1 (pointer-ref points (+ idx 1))) - (x2 (pointer-ref points (+ idx 2))) - (y2 (pointer-ref points (+ idx 3))) - (m (if (= 0.0 (- x2 x1)) 0.0 (/ (- y2 y1) (- x2 x1)))) - (c (- y2 (* m x2))) - (l (lambda (time) (+ (* m time) c)))) - (pointer-set! lines k l))) - lines))) - -(bind-func make-envelope - (lambda (points:double* num-of-points) - (let ((klines:[double,double]** (envelope-segments points num-of-points)) - (line-length num-of-points)) - (lambda (time) - (let ((res -1.0) - (k:i64 0)) - (dotimes (k num-of-points) - (let ((line (pointer-ref klines k)) - (time-point (pointer-ref points (* k 2)))) - (if (or (= time time-point) - (< time-point time)) - (set! 
res (line time))))) - res))))) - -;; make a convenience wrapper -(xtmtest '(bind-func env-wrap - (let* ((points 3) - (data:double* (zone-alloc (* points 2)))) - (pointer-set! data 0 0.0) ;; point data - (pset! data 1 0.0) - (pset! data 2 2.0) - (pset! data 3 1.0) - (pset! data 4 4.0) - (pset! data 5 0.0) - (let ((f (make-envelope data points))) - (lambda (time:double) - (f time))))) - (env-wrap 0.0) 0.0) - -(xtmtest-result (env-wrap 1.0) 0.5) -(xtmtest-result (env-wrap 2.0) 1.0) -(xtmtest-result (env-wrap 2.5) 0.75) -(xtmtest-result (env-wrap 4.0) 0.0) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; direct access to a closures environment -;; -;; it is possible to directly access a closures -;; environment in order to read or modify data -;; at runtime. -;; -;; You do this using a dot operator -;; To access an environment slot you use -;; closure.slot:type -;; So for example -;; (f.a:i32) -;; would return the 32bit integer symbol 'a' -;; from the closure 'f' -;; -;; To set an environment slot you just -;; add a value of the correct type -;; for example -;; (f.a:i32 565) -;; would set 'a' in 'f' to 565 -;; -;; let's create a closure that capture's 'a' - - -(xtmtest '(bind-func dot-access-test1 - (let ((a:i32 6)) - (lambda () - (printf "a:%d\n" a) - a))) - (dot-access-test1)) - -;; now let's create a new function -;; that calls my-test14 twice -;; once normally -;; then we directly set the closures 'a' binding -;; then call again -;; -(xtmtest '(bind-func dot-access-test2 - (lambda (x:i32) - (dot-access-test1) - (dot-access-test1.a:i32 x) - (dot-access-test1))) - - (dot-access-test2 9)) - -;; of course this works just as well for -;; non-global closures -(xtmtest '(bind-func dot-access-test3 - (lambda (a:i32) - (let ((f (lambda () - (* 3 a)))) - f))) - (dot-access-test3 1)) - -(xtmtest '(bind-func dot-access-test4 - (lambda () - (let ((f (dot-access-test3 5))) - (f.a:i32 7) - (f)))) - - (dot-access-test4) - 21) - -;; and you can get and set closures also! -(xtmtest '(bind-func dot-access-test5 - (lambda () - (let ((f (lambda (x:i64) x))) - (lambda (z) - (f z))))) - - (dot-access-test5)) - -(xtmtest '(bind-func dot-access-test6 - (lambda () - (let ((t1 (dot-access-test5)) - (t2 (dot-access-test5))) - ;; identity of 5 - (printf "%lld:%lld\n" (t1 5) (t2 5)) - (t1.f:[i64,i64]* (lambda (x:i64) (* x x))) - ;; square of 5 - (printf "%lld:%lld\n" (t1 5) (t2 5)) - ;; cube of 5 - (t2.f:[i64,i64]* (lambda (y:i64) (* y y y))) - (printf "%lld:%lld\n" (t1 5) (t2 5)) - void))) - - (dot-access-test6)) ;; 5:5 > 25:5 > 25:125 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; named types - -;; it can sometimes be helpful to allocate -;; a predefined tuple type on the stack -;; you can do this using allocate -(bind-type vec3 ) - -;; String printing! -(bind-func vec3_print:[void,vec3*]* - (lambda (x) - (printf "<%d,%d,%d>" (tref x 0) (tref x 1) (tref x 2)) - void)) - -(bind-poly print vec3_print) - -;; note that point is deallocated at the -;; end of the function call. You can -;; stack allocate (stack-alloc) -;; any valid type (i64 for example) -(xtmtest '(bind-func salloc-test - (lambda () - (let ((point:vec3* (stack-alloc))) - (tset! point 0 0.0) - (tset! point 1 -1.0) - (tset! 
point 2 1.0) - 1))) - - (salloc-test)) ;; 1 - -;; all named types have 2 default constructors -;; name (zone alloation) + name_h (heap allocation) -;; and a default print poly -(xtmtest '(bind-func data-constructor-test - (lambda () - (let ((v1 (vec3 1.0 2.0 3.0)) - (v2 (vec3_h 4.0 5.0 6.0))) - (println v1 v2) - ;; halloced vec3 needs freeing - (free v2) - void))) - - (data-constructor-test)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; aref-ptr and tref-ptr -;; - -;; aref-ptr and tref-ptr return a pointer to an element -;; just as aref and tref return elements aref-ptr and -;; tref-ptr return a pointer to those elements. - -;; This allows you to do things like create an array -;; with an offset -(xtmtest '(bind-func aref-ptr-test - (lambda () - (let ((arr:|32,i64|* (alloc)) - (arroff (aref-ptr arr 16)) - (i 0) - (k 0)) - ;; load arr - (dotimes (i 32) (aset! arr i i)) - (dotimes (k 16) - (printf "index: %lld\tarr: %lld\tarroff: %lld\n" - k (aref arr k) (pref arroff k)))))) - - (aref-ptr-test)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; arrays -;; Extempore lang supports arrays as for first class -;; aggregate types (in other words as distinct from -;; a pointer). -;; -;; an array is made up of a size and a type -;; |32,i64| is an array of 32 elements of type i64 -;; - -(bind-type tuple-with-array ) - -(xtmtest '(bind-func array-test5 - (lambda () - (let ((tup:tuple-with-array* (stack-alloc)) - (t2:|32,i64|* (stack-alloc))) - (aset! t2 0 9) - (tset! tup 2 5.5) - (aset! (aref-ptr (tref-ptr tup 1) 0) 0 0) - (aset! (aref-ptr (tref-ptr tup 1) 0) 1 1) - (aset! (aref-ptr (tref-ptr tup 1) 0) 2 2) - (printf "val: %lld %lld %f\n" - (aref (aref-ptr (tref-ptr tup 1) 0) 1) - (aref t2 0) (ftod (tref tup 2))) - (aref (aref-ptr (tref-ptr tup 1) 0) 1)))) - - (array-test5) 1) ;; val: 1 9 5.5 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; Global Variables -;; -;; You can allocate global variables using bind-val -;; - -(bind-val g_var_a i32 5) - -;; increment g_var_a by inc -;; and return new value of g_var_a -(xtmtest '(bind-func global_var_test1 - (lambda (incr) - (set! g_var_a (+ g_var_a incr)) - g_var_a)) - - (global_var_test1 3) 8) ;; 8 - -;; you can bind any primitive type -(bind-val g_var_b double 5.5) -(bind-val g_var_c i1 0) - -(xtmtest '(bind-func global_var_test1b - (lambda () - (* g_var_b (if g_var_c 1.0 4.0)))) - - (global_var_test1b) 22.0) - -;; global strings - -(bind-val g_cstring i8* "Jiblet.") - -(xtmtest '(bind-func test_g_cstring - (lambda () - (let ((i 0)) - (dotimes (i 7) - (printf "g_cstring[%lld] = %c\n" i (pref g_cstring i))) - (printf "\nSpells... %s\n" g_cstring)))) - - (test_g_cstring)) - -(xtmtest '(bind-func test_g_cstring1 - (lambda () - (let ((test_cstring "Niblot.") - (i 0) - (total 0)) - (dotimes (i 7) - (let ((c1 (pref g_cstring i)) - (c2 (pref test_cstring i))) - (printf "checking %c against %c\n" c1 c2) - (if (= c1 c2) - (set! total (+ total 1))))) - total))) - - (test_g_cstring1) 5) - - - - - -;; for tuples, arrays and vectors, bind-val only takes *two* -;; arguments. The tuple/array/vector will be initialised to zero. - -(bind-val g_tuple1 ) -(bind-val g_tuple2 ) - -(xtmtest '(bind-func test_g_tuple - (lambda () - (tfill! g_tuple1 1 4) - (tfill! 
g_tuple2 4.0 1.0) - (and (= (tref g_tuple1 0) (dtoi64 (tref g_tuple2 1))) - (= (dtoi64 (tref g_tuple2 0)) (tref g_tuple1 1))))) - - (test_g_tuple) 1) - -;; same thing with arrays - -(bind-val g_array1 |10,double|) -(bind-val g_array2 |10,i64|) - -;; if we just loop over and print the values in each array - -(xtmtest '(bind-func test_g_array11 - (lambda () - (let ((i 0)) - (dotimes (i 10) - (printf "garray_1[%lld] = %f garray_2[%lld] = %lld\n" - i (aref g_array1 i) i (aref g_array2 i)))))) - - (test_g_array11) 1) - -;; but if we loop over and set some values into the arrays - -(xtmtest '(bind-func test_g_array2 - (lambda () - (let ((i 0)) - (dotimes (i 10) - (aset! g_array1 i (i64tod i)) - (aset! g_array2 i i) - (printf "garray_1[%lld] = %f garray_2[%lld] = %lld\n" - i (aref g_array1 i) i (aref g_array2 i))) - (= (dtoi64 (aref g_array1 5)) - (aref g_array2 5))))) - - (test_g_array2) 1) - -;; just to test, let's try a large array - -(bind-val g_array3 |100000000,i64|) - -(xtmtest '(bind-func test_g_array3 - (lambda () - (let ((i 0)) - (dotimes (i 100000000) - (aset! g_array3 i i)) - (= (pref g_array3 87654321) - 87654321)))) - - (test_g_array3) 1) - -;; if you want to bind a global pointer, then the third 'value' -;; argument is the size of the memory to allocate (in elements, not in bytes) - -(bind-val g_ptr0 double* 10) - -(xtmtest '(bind-func test_g_ptr0 - (lambda () - (let ((total 0.0) - (i 0)) - (dotimes (i 10) - (pset! g_ptr0 i (i64tod i)) - (set! total (+ total (pref g_ptr0 i)))) - total))) - - (test_g_ptr0) 45.0) - -(bind-val g_ptr1 |4,i32|* 2) -(bind-val g_ptr2 * 4) - -(xtmtest '(bind-func test_g_ptr1 - (lambda () - (afill! g_ptr1 11 66 35 81) - (tset! g_ptr2 1 35.0) - (printf "%f :: %d\n" (tref g_ptr2 1) (aref g_ptr1 2)) - (aref g_ptr1 3))) - - (test_g_ptr1) 81) ;; should also print 35.000000 :: 35 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; Callbacks - -(xtmtest '(bind-func callback-test - (lambda (time:i64 count:i64) - (printf "time: %lld:%lld\n" time count) - (callback (+ time 1000) callback-test (+ time 22050) (+ count 1)))) - - (callback-test (now) 0)) - -;; compiling this will stop the callbacks -;; -;; of course we need to keep the type -;; signature the same [void,i64,i64]* -;; -(xtmtest '(bind-func callback-test - (lambda (time:i64 count:i64) - #t)) - - (callback-test)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; some memzone tests - -(xtmtest '(bind-func memzone-test1 - (lambda () - (let ((b:|5,double|* (zalloc))) - (aset! b 0 - (memzone 1024 - (let ((a:|10,double|* (zalloc))) - (aset! a 0 3.5) - (aref a 0)))) - (let ((c:|9,i32|* (zalloc))) - (aset! c 0 99) - (aref b 0))))) - - (memzone-test1) 3.5) - -(xtmtest '(bind-func memzone-test2 - (lambda () - (memzone 1024 - (let ((k:|15,double|* (zalloc)) - (f (lambda (fa:|15,double|*) - (memzone 1024 - (let ((a:|10,double|* (zalloc)) - (i 0)) - (dotimes (i 10) - (aset! a i (* (aref fa i) (random)))) - a))))) - (f k))))) - - (memzone-test2)) - -(xtmtest '(bind-func memzone-test3 - (lambda () - (let ((v (memzone-test2)) - (i 0)) - (dotimes (i 10) (printf "%lld:%f\n" i (aref v i)))))) - - (memzone-test3)) ;; should print all 0.0's - -(xtmtest '(bind-func memzone-test4 - (lambda () - (memzone 1024 (* 44100 10) - (let ((a:|5,double|* (alloc))) - (aset! a 0 5.5) - (aref a 0))))) - - (memzone-test4) 5.50000) - -;; -;; Large allocation of memory on BUILD (i.e. when the closure is created) -;; requires an optional argument (i.e. 
an amount of memory to allocate -;; specifically for closure creation) -;; -;; This memory is automatically free'd whenever you recompile the closure -;; (it will be destroyed and replaced by a new allocation of the -;; same amount or whatever new amount you have allocated for closure -;; compilation) -;; -(xtmtest '(bind-func closure-zalloc-test 1000000 - (let ((k:|100000,double|* (zalloc))) - (lambda () - (aset! k 0 1.0) - (aref k 0)))) - - (closure-zalloc-test 1000000)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; Ad-Hoc Polymorphism -;; -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -;; extempore supports ad-hoc polymorphism -;; at some stage in the future this will -;; be implicit - but for the moment -;; it is explicitly defined using bind-poly - -;; ad-hoc polymorphism allows you to provide -;; different specialisations depending on -;; type. In other words, a single 'name' -;; can be bound to multiple function -;; implementations each with a uniqute -;; type. - - -;; poly variables can be for functions of -;; mixed argument lengths -;; -;; so for example: -(bind-func poly-test4 - (lambda (a:i8*) - (printf "%s\n" a))) - -(bind-func poly-test5 - (lambda (a:i8* b:i8*) - (printf "%s %s\n" a b))) - -(bind-func poly-test6 - (lambda (a:i8* b:i8* c:i8*) - (printf "%s %s %s\n" a b c))) - -;; bind these three functions to poly 'print' -(bind-poly testprint poly-test4) -(bind-poly testprint poly-test5) -(bind-poly testprint poly-test6) - -(xtmtest '(bind-func poly-test7 - (lambda () - (testprint "extempore's") - (testprint "extempore's" "polymorphism") - (testprint "extempore's" "polymorphism" "rocks"))) - - (poly-test7)) - -;; polys can Also specialize -;; on the return type -(bind-func poly-test8 - (lambda (a:double) - (* a a))) - -(bind-func poly-test9 - (lambda (a:double) - (dtoi64 (* a a)))) - -(bind-poly sqrd poly-test8) -(bind-poly sqrd poly-test9) - -;; specialize on [i64,double]* -;; -(xtmtest '(bind-func poly-test10:[i64,double]* - (lambda (a) - (+ 1 (sqrd a)))) - (poly-test10 5.0)) - -;; specialize on [double,doube]* -(xtmtest '(bind-func poly-test11:[double,double]* - (lambda (a) - (+ 1.0 (sqrd a)))) - - (poly-test11 5.0)) - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; a little test for zone cleanup -;; -(bind-func MyLittleCleanupTest - (lambda () - (let ((tmp2:i8* (alloc 8))) - (cleanup (println "Clean up before leaving zone!")) - tmp2))) - -(xtmtest '(bind-func cleanup-test - (lambda () - (letz ((tmp:i8* (alloc 8)) - (t2 (MyLittleCleanupTest))) - (begin - (println "In Zone ...") - 1)) - (println "Out of zone ...") - void)) - - (cleanup-test)) - -;;;;;;;;;;;;;;;;;; -;; vector types - -;; (bind-func vector-test1 -;; (lambda () -;; (let ((v1:/4,float/* (alloc)) -;; (v2:/4,float/* (alloc)) -;; (v3:/4,float/* (alloc))) -;; (vfill! v1 4.0 3.0 2.0 1.0) -;; (vfill! v2 1.0 2.0 3.0 4.0) -;; (vfill! v3 5.0 5.0 5.0 5.0) -;; (let ((v4 (* v1 v2)) -;; (v5 (> v3 v4))) ;; unforunately vector conditionals don't work! -;; (printf "mul:%f:%f:%f:%f\n" (ftod (vref v4 0)) (ftod (vref v4 1)) (ftod (vref v4 2)) (ftod (vref v4 3))) -;; (printf "cmp:%d:%d:%d:%d\n" (i1toi32 (vref v5 0)) (i1toi32 (vref v5 1)) (i1toi32 (vref v5 2)) (i1toi32 (vref v5 3))) -;; void)))) - -;; (test-xtfunc (vector-test1)) - -(bind-func vector-test2 - (lambda () - (let ((v1:/4,float/* (alloc)) - (v2:/4,float/* (alloc))) - (vfill! v1 1.0 2.0 4.0 8.0) - (vfill! 
v2 2.0 2.5 2.25 2.125) - (* v1 v2)))) - -(xtmtest '(bind-func vector-test3 - (lambda () - (let ((a (vector-test2))) - (printf "%f:%f:%f:%f\n" - (ftod (vref a 0)) - (ftod (vref a 1)) - (ftod (vref a 2)) - (ftod (vref a 3))) - void))) - - (vector-test3)) - -;; vectorised sine func -(bind-func vsinf4 - (let ((p:/4,float/* (alloc)) - (b:/4,float/* (alloc)) - (c:/4,float/* (alloc)) - (f1:/4,float/* (alloc)) - (f2:/4,float/* (alloc)) - (i:i32 0) - (p_ 0.225) - (b_ (dtof (/ 4.0 3.1415))) - (c_ (dtof (/ -4.0 (* 3.1415 3.1415))))) - (dotimes (i 4) (vset! p i p_) (vset! b i b_) (vset! c i c_)) - (lambda (x:/4,float/) - ;; no SIMD for abs yet! - (dotimes (i 4) (vset! f1 i (fabs (vref x i)))) - (let ((y (+ (* b x) (* c x f1)))) - ;; no SIMD for abs yet! - (dotimes (i 4) (vset! f2 i (fabs (vref y i)))) - (+ (* p (- (* y f2) y)) y))))) - -(bind-func vcosf4 - (let ((p:/4,float/* (alloc)) - (b:/4,float/* (alloc)) - (c:/4,float/* (alloc)) - (d:/4,float/* (alloc)) - (f1:/4,float/* (alloc)) - (f2:/4,float/* (alloc)) - (i:i32 0) - (p_ 0.225) - (d_ (dtof (/ 3.1415 2.0))) - (b_ (dtof (/ 4.0 3.1415))) - (c_ (dtof (/ -4.0 (* 3.1415 3.1415))))) - (dotimes (i 4) - (vset! p i p_) (vset! b i b_) (vset! c i c_) (vset! d i d_)) - (lambda (x:/4,float/) - ;; offset x for cos - (set! x (+ x d)) - ;; no SIMD for abs yet! - (dotimes (i 4) (vset! f1 i (fabs (vref x i)))) - (let ((y (+ (* b x) (* c x f1)))) - ;; no SIMD for abs yet! - (dotimes (i 4) (vset! f2 i (fabs (vref y i)))) - (+ (* p (- (* y f2) y)) y))))) - - -(xtmtest '(bind-func vector-test4 - (lambda () - (let ((a:/4,float/* (alloc))) - (vfill! a 0.1 0.2 0.3 0.4) - (let ((b (vsinf4 (pref a 0))) - (c (vcosf4 (pref a 0)))) - (printf "precision inaccuracy is expected:\n") - (printf " sinf:\t%f,%f,%f,%f\n" - (ftod (sin 0.1:f)) - (ftod (sin 0.2:f)) - (ftod (sin 0.3:f)) - (ftod (sin 0.4:f))) - (printf "vsinf:\t%f,%f,%f,%f\n" - (ftod (vref b 0)) - (ftod (vref b 1)) - (ftod (vref b 2)) - (ftod (vref b 3))) - (printf " cosf:\t%f,%f,%f,%f\n" - (ftod (cos 0.1:f)) - (ftod (cos 0.2:f)) - (ftod (cos 0.3:f)) - (ftod (cos 0.4:f))) - (printf "vcosf:\t%f,%f,%f,%f\n" - (ftod (vref c 0)) - (ftod (vref c 1)) - (ftod (vref c 2)) - (ftod (vref c 3))) - void)))) - - (vector-test4)) - -;; test the call-as-xtlang macro - -;; make sure it'll handle multiple body forms -(xtmtest-result (call-as-xtlang (println 1) (println 2) 5) - 5) - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; -;; test globalvar as closure -;; -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -(bind-func testinc - (lambda (incr:i64) - (lambda (x:i64) - (+ x incr)))) - -(bind-val GlobalInc [i64,i64]* (testinc 2)) - -(xtmtest '(bind-func ginc - (lambda () - (GlobalInc 5))) - (ginc) 7) - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; syntax highlighting tests ;; -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; these don't return any values, they're visual tests---do they look -;; right? 
- -(bind-func hl_test1a:[i32,double,|4,i32|**]* 4000 - "docstring" - (lambda (a b) - (printf "done\n"))) - -(bind-func hl_test1b:[i32]* - (lambda () - (let ((i:i32 6)) - (printf "done\n")))) - -(bind-val hl_test2 ) -(bind-val hl_test3 |4,i8|) -(bind-val hl_test4 double* 10) -(bind-val hl_test5 i8* "teststr") - -(bind-type hl_test_type ) - -(println '(bind-lib testlib testfn [i32,i32]*)) - -;; (and 4 5) -;; (bind-val hl_test4 double* 10) -;; (bind-type hl_test_type "docstring") -;; (bind-lib testlib testfn [i32,i32]*) diff --git a/tests/examplefiles/example.yaml b/tests/examplefiles/example.yaml deleted file mode 100644 index 17544c02..00000000 --- a/tests/examplefiles/example.yaml +++ /dev/null @@ -1,311 +0,0 @@ -# -# Regression tests -# - -%TAG ! tag:example.com:foo/ ---- -test: !foo/bar {a: 'asdf'} -test2: fred -... - -# -# Examples from the Preview section of the YAML specification -# (http://yaml.org/spec/1.2/#Preview) -# - -# Sequence of scalars ---- -- Mark McGwire -- Sammy Sosa -- Ken Griffey - -# Mapping scalars to scalars ---- -hr: 65 # Home runs -avg: 0.278 # Batting average -rbi: 147 # Runs Batted In - -# Mapping scalars to sequences ---- -american: - - Boston Red Sox - - Detroit Tigers - - New York Yankees -national: - - New York Mets - - Chicago Cubs - - Atlanta Braves - -# Sequence of mappings ---- -- - name: Mark McGwire - hr: 65 - avg: 0.278 -- - name: Sammy Sosa - hr: 63 - avg: 0.288 - -# Sequence of sequences ---- -- [name , hr, avg ] -- [Mark McGwire, 65, 0.278] -- [Sammy Sosa , 63, 0.288] - -# Mapping of mappings ---- -Mark McGwire: {hr: 65, avg: 0.278} -Sammy Sosa: { - hr: 63, - avg: 0.288 - } - -# Two documents in a stream ---- # Ranking of 1998 home runs -- Mark McGwire -- Sammy Sosa -- Ken Griffey ---- # Team ranking -- Chicago Cubs -- St Louis Cardinals - -# Documents with the end indicator ---- -time: 20:03:20 -player: Sammy Sosa -action: strike (miss) -... ---- -time: 20:03:47 -player: Sammy Sosa -action: grand slam -... - -# Comments ---- -hr: # 1998 hr ranking - - Mark McGwire - - Sammy Sosa -rbi: - # 1998 rbi ranking - - Sammy Sosa - - Ken Griffey - -# Anchors and aliases ---- -hr: - - Mark McGwire - # Following node labeled SS - - &SS Sammy Sosa -rbi: - - *SS # Subsequent occurrence - - Ken Griffey - -# Mapping between sequences ---- -? - Detroit Tigers - - Chicago cubs -: - - 2001-07-23 -? [ New York Yankees, - Atlanta Braves ] -: [ 2001-07-02, 2001-08-12, - 2001-08-14 ] - -# Inline nested mapping ---- -# products purchased -- item : Super Hoop - quantity: 1 -- item : Basketball - quantity: 4 -- item : Big Shoes - quantity: 1 - -# Literal scalars ---- | # ASCII art - \//||\/|| - // || ||__ - -# Folded scalars ---- > - Mark McGwire's - year was crippled - by a knee injury. - -# Preserved indented block in a folded scalar ---- -> - Sammy Sosa completed another - fine season with great stats. - - 63 Home Runs - 0.288 Batting Average - - What a year! - -# Indentation determines scope ---- -name: Mark McGwire -accomplishment: > - Mark set a major league - home run record in 1998. -stats: | - 65 Home Runs - 0.278 Batting Average - -# Quoted scalars ---- -unicode: "Sosa did fine.\u263A" -control: "\b1998\t1999\t2000\n" -hex esc: "\x0d\x0a is \r\n" -single: '"Howdy!" he cried.' -quoted: ' # not a ''comment''.' -tie-fighter: '|\-*-/|' - -# Multi-line flow scalars ---- -plain: - This unquoted scalar - spans many lines. 
-quoted: "So does this - quoted scalar.\n" - -# Integers ---- -canonical: 12345 -decimal: +12_345 -sexagesimal: 3:25:45 -octal: 014 -hexadecimal: 0xC - -# Floating point ---- -canonical: 1.23015e+3 -exponential: 12.3015e+02 -sexagesimal: 20:30.15 -fixed: 1_230.15 -negative infinity: -.inf -not a number: .NaN - -# Miscellaneous ---- -null: ~ -true: boolean -false: boolean -string: '12345' - -# Timestamps ---- -canonical: 2001-12-15T02:59:43.1Z -iso8601: 2001-12-14t21:59:43.10-05:00 -spaced: 2001-12-14 21:59:43.10 -5 -date: 2002-12-14 - -# Various explicit tags ---- -not-date: !!str 2002-04-28 -picture: !!binary | - R0lGODlhDAAMAIQAAP//9/X - 17unp5WZmZgAAAOfn515eXv - Pz7Y6OjuDg4J+fn5OTk6enp - 56enmleECcgggoBADs= -application specific tag: !something | - The semantics of the tag - above may be different for - different documents. - -# Global tags -%TAG ! tag:clarkevans.com,2002: ---- !shape - # Use the ! handle for presenting - # tag:clarkevans.com,2002:circle -- !circle - center: &ORIGIN {x: 73, y: 129} - radius: 7 -- !line - start: *ORIGIN - finish: { x: 89, y: 102 } -- !label - start: *ORIGIN - color: 0xFFEEBB - text: Pretty vector drawing. - -# Unordered sets ---- !!set -# sets are represented as a -# mapping where each key is -# associated with the empty string -? Mark McGwire -? Sammy Sosa -? Ken Griff - -# Ordered mappings ---- !!omap -# ordered maps are represented as -# a sequence of mappings, with -# each mapping having one key -- Mark McGwire: 65 -- Sammy Sosa: 63 -- Ken Griffy: 58 - -# Full length example ---- ! -invoice: 34843 -date : 2001-01-23 -bill-to: &id001 - given : Chris - family : Dumars - address: - lines: | - 458 Walkman Dr. - Suite #292 - city : Royal Oak - state : MI - postal : 48046 -ship-to: *id001 -product: - - sku : BL394D - quantity : 4 - description : Basketball - price : 450.00 - - sku : BL4438H - quantity : 1 - description : Super Hoop - price : 2392.00 -tax : 251.42 -total: 4443.52 -comments: - Late afternoon is best. - Backup contact is Nancy - Billsmer @ 338-4338. - -# Another full-length example ---- -Time: 2001-11-23 15:01:42 -5 -User: ed -Warning: - This is an error message - for the log file ---- -Time: 2001-11-23 15:02:31 -5 -User: ed -Warning: - A slightly different error - message. 
---- -Date: 2001-11-23 15:03:17 -5 -User: ed -Fatal: - Unknown variable "bar" -Stack: - - file: TopClass.py - line: 23 - code: | - x = MoreObject("345\n") - - file: MoreClass.py - line: 58 - code: |- - foo = bar - diff --git a/tests/examplefiles/example.zig b/tests/examplefiles/example.zig deleted file mode 100644 index 32e72849..00000000 --- a/tests/examplefiles/example.zig +++ /dev/null @@ -1,263 +0,0 @@ -const std = @import("std"); -const Allocator = mem.Allocator; -const mem = std.mem; -const ast = std.zig.ast; -const Visib = @import("visib.zig").Visib; -const event = std.event; -const Value = @import("value.zig").Value; -const Token = std.zig.Token; -const errmsg = @import("errmsg.zig"); -const Scope = @import("scope.zig").Scope; -const Compilation = @import("compilation.zig").Compilation; - -pub const Decl = struct { - id: Id, - name: []const u8, - visib: Visib, - resolution: event.Future(Compilation.BuildError!void), - parent_scope: *Scope, - - // TODO when we destroy the decl, deref the tree scope - tree_scope: *Scope.AstTree, - - pub const Table = std.HashMap([]const u8, *Decl, mem.hash_slice_u8, mem.eql_slice_u8); - - pub fn cast(base: *Decl, comptime T: type) ?*T { - if (base.id != @field(Id, @typeName(T))) return null; - return @fieldParentPtr(T, "base", base); - } - - pub fn isExported(base: *const Decl, tree: *ast.Tree) bool { - switch (base.id) { - Id.Fn => { - const fn_decl = @fieldParentPtr(Fn, "base", base); - return fn_decl.isExported(tree); - }, - else => return false, - } - } - - pub fn getSpan(base: *const Decl) errmsg.Span { - switch (base.id) { - Id.Fn => { - const fn_decl = @fieldParentPtr(Fn, "base", base); - const fn_proto = fn_decl.fn_proto; - const start = fn_proto.fn_token; - const end = fn_proto.name_token orelse start; - return errmsg.Span{ - .first = start, - .last = end + 1, - }; - }, - else => @panic("TODO"), - } - } - - pub fn findRootScope(base: *const Decl) *Scope.Root { - return base.parent_scope.findRoot(); - } - - pub const Id = enum { - Var, - Fn, - CompTime, - }; - - pub const Var = struct { - base: Decl, - }; - - pub const Fn = struct { - base: Decl, - value: Val, - fn_proto: *ast.Node.FnProto, - - // TODO https://github.com/ziglang/zig/issues/683 and then make this anonymous - pub const Val = union(enum) { - Unresolved: void, - Fn: *Value.Fn, - FnProto: *Value.FnProto, - }; - - pub fn externLibName(self: Fn, tree: *ast.Tree) ?[]const u8 { - return if (self.fn_proto.extern_export_inline_token) |tok_index| x: { - const token = tree.tokens.at(tok_index); - break :x switch (token.id) { - Token.Id.Extern => tree.tokenSlicePtr(token), - else => null, - }; - } else null; - } - - pub fn isExported(self: Fn, tree: *ast.Tree) bool { - if (self.fn_proto.extern_export_inline_token) |tok_index| { - const token = tree.tokens.at(tok_index); - return token.id == Token.Id.Keyword_export; - } else { - return false; - } - } - }; - - pub const CompTime = struct { - base: Decl, - }; -}; - -pub const info_zen = - \\ - \\ * Communicate intent precisely. - \\ * Edge cases matter. - \\ * Favor reading code over writing code. - \\ * Only one obvious way to do things. - \\ * Runtime crashes are better than bugs. - \\ * Compile errors are better than runtime crashes. - \\ * Incremental improvements. - \\ * Avoid local maximums. - \\ * Reduce the amount one must remember. - \\ * Minimize energy spent on coding style. - \\ * Together we serve end users. 
- \\ - \\ -; - -fn cmdZen(allocator: *Allocator, args: []const []const u8) !void { - try stdout.write(info_zen); -} - -const usage_internal = - \\usage: zig internal [subcommand] - \\ - \\Sub-Commands: - \\ build-info Print static compiler build-info - \\ - \\ -; - -fn cmdInternal(allocator: *Allocator, args: []const []const u8) !void { - if (args.len == 0) { - try stderr.write(usage_internal); - os.exit(1); - } - - const sub_commands = []Command{Command{ - .name = "build-info", - .exec = cmdInternalBuildInfo, - }}; - - for (sub_commands) |sub_command| { - if (mem.eql(u8, sub_command.name, args[0])) { - try sub_command.exec(allocator, args[1..]); - return; - } - } - - try stderr.print("unknown sub command: {}\n\n", args[0]); - try stderr.write(usage_internal); -} - -fn cmdInternalBuildInfo(allocator: *Allocator, args: []const []const u8) !void { - try stdout.print( - \\ZIG_CMAKE_BINARY_DIR {} - \\ZIG_CXX_COMPILER {} - \\ZIG_LLVM_CONFIG_EXE {} - \\ZIG_LLD_INCLUDE_PATH {} - \\ZIG_LLD_LIBRARIES {} - \\ZIG_STD_FILES {} - \\ZIG_C_HEADER_FILES {} - \\ZIG_DIA_GUIDS_LIB {} - \\ - , - std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR), - std.cstr.toSliceConst(c.ZIG_CXX_COMPILER), - std.cstr.toSliceConst(c.ZIG_LLVM_CONFIG_EXE), - std.cstr.toSliceConst(c.ZIG_LLD_INCLUDE_PATH), - std.cstr.toSliceConst(c.ZIG_LLD_LIBRARIES), - std.cstr.toSliceConst(c.ZIG_STD_FILES), - std.cstr.toSliceConst(c.ZIG_C_HEADER_FILES), - std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB), - ); -} - -fn test__floatuntisf(a: u128, expected: f32) void { - const x = __floatuntisf(a); - testing.expect(x == expected); -} - -test "floatuntisf" { - test__floatuntisf(0, 0.0); - - test__floatuntisf(1, 1.0); - test__floatuntisf(2, 2.0); - test__floatuntisf(20, 20.0); - - test__floatuntisf(0x7FFFFF8000000000, 0x1.FFFFFEp+62); - test__floatuntisf(0x7FFFFF0000000000, 0x1.FFFFFCp+62); - - test__floatuntisf(make_ti(0x8000008000000000, 0), 0x1.000001p+127); - test__floatuntisf(make_ti(0x8000000000000800, 0), 0x1.0p+127); - test__floatuntisf(make_ti(0x8000010000000000, 0), 0x1.000002p+127); - - test__floatuntisf(make_ti(0x8000000000000000, 0), 0x1.000000p+127); - - test__floatuntisf(0x0007FB72E8000000, 0x1.FEDCBAp+50); - - test__floatuntisf(0x0007FB72EA000000, 0x1.FEDCBA8p+50); - test__floatuntisf(0x0007FB72EB000000, 0x1.FEDCBACp+50); - - test__floatuntisf(0x0007FB72EC000000, 0x1.FEDCBBp+50); - - test__floatuntisf(0x0007FB72E6000000, 0x1.FEDCB98p+50); - test__floatuntisf(0x0007FB72E7000000, 0x1.FEDCB9Cp+50); - test__floatuntisf(0x0007FB72E4000000, 0x1.FEDCB9p+50); - - test__floatuntisf(0xFFFFFFFFFFFFFFFE, 0x1p+64); - test__floatuntisf(0xFFFFFFFFFFFFFFFF, 0x1p+64); - - test__floatuntisf(0x0007FB72E8000000, 0x1.FEDCBAp+50); - - test__floatuntisf(0x0007FB72EA000000, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72EB000000, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72EBFFFFFF, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72EC000000, 0x1.FEDCBCp+50); - test__floatuntisf(0x0007FB72E8000001, 0x1.FEDCBAp+50); - - test__floatuntisf(0x0007FB72E6000000, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72E7000000, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72E7FFFFFF, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72E4000001, 0x1.FEDCBAp+50); - test__floatuntisf(0x0007FB72E4000000, 0x1.FEDCB8p+50); - - test__floatuntisf(make_ti(0x0000000000001FED, 0xCB90000000000001), 0x1.FEDCBAp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBA0000000000000), 0x1.FEDCBAp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBAFFFFFFFFFFFFF), 0x1.FEDCBAp+76); - 
test__floatuntisf(make_ti(0x0000000000001FED, 0xCBB0000000000000), 0x1.FEDCBCp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBB0000000000001), 0x1.FEDCBCp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBBFFFFFFFFFFFFF), 0x1.FEDCBCp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBC0000000000000), 0x1.FEDCBCp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBC0000000000001), 0x1.FEDCBCp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBD0000000000000), 0x1.FEDCBCp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBD0000000000001), 0x1.FEDCBEp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBDFFFFFFFFFFFFF), 0x1.FEDCBEp+76); - test__floatuntisf(make_ti(0x0000000000001FED, 0xCBE0000000000000), 0x1.FEDCBEp+76); -} - -fn trimStart(slice: []const u8, ch: u8) []const u8 { - var i: usize = 0; - const test_string = "test\"string"; - for (slice) |b| { - if (b == '\xa3') break; - if (b == '\ua3d3') break; - if (b == '\Ua3d3d3') break; - if (b == '\t') break; - if (b == '\n') break; - if (b == '\\') break; - if (b == '\'') break; - if (b == '"') break; - if (b != 'n') break; - if (b != '-') break; - i += 1; - } - - return slice[i..]; -} diff --git a/tests/examplefiles/example1.cadl b/tests/examplefiles/example1.cadl deleted file mode 100644 index 3350fa3b..00000000 --- a/tests/examplefiles/example1.cadl +++ /dev/null @@ -1,149 +0,0 @@ - -- - -- Example fragment of an openEHR Archetype, written in cADL, a subsyntax of the Archetype Definition Language (ADL) - -- definition available here: http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf - -- Author: Thomas Beale - -- - - EVALUATION[id1] matches { -- Adverse Reaction - data matches { - ITEM_TREE[id2] matches { - items cardinality matches {1..*; unordered} matches { - ELEMENT[id3] matches { -- Substance/Agent - value matches { - DV_TEXT[id51] - } - } - ELEMENT[id5] occurrences matches {0..1} matches { -- Absolute Contraindication? 
- value matches { - DV_BOOLEAN[id52] matches { - value matches {True} - } - } - } - ELEMENT[id50] occurrences matches {0..1} matches { -- Future Use - value matches { - DV_TEXT[id53] - } - } - ELEMENT[id7] occurrences matches {0..1} matches { -- Overall Comment - value matches { - DV_TEXT[id54] - } - } - CLUSTER[id10] matches { -- Reaction Event - items matches { - ELEMENT[id11] occurrences matches {0..1} matches { -- Specific Substance/Agent - value matches { - DV_TEXT[id55] - } - } - ELEMENT[id12] matches { -- Manifestation - value matches { - DV_TEXT[id56] - } - } - ELEMENT[id17] occurrences matches {0..1} matches { -- Reaction Type - value matches { - DV_TEXT[id57] - } - } - ELEMENT[id22] occurrences matches {0..1} matches { -- Certainty - value matches { - DV_CODED_TEXT[id58] matches { - defining_code matches {[ac1]} -- Certainty (synthesised) - } - } - } - ELEMENT[id13] occurrences matches {0..1} matches { -- Reaction Description - value matches { - DV_TEXT[id59] - } - } - ELEMENT[id28] occurrences matches {0..1} matches { -- Onset of Reaction - value matches { - DV_DATE_TIME[id60] - } - } - ELEMENT[id29] occurrences matches {0..1} matches { -- Duration of Reaction - value matches { - DV_DURATION[id61] - } - } - allow_archetype CLUSTER[id30] matches { -- Additional Reaction Detail - include - archetype_id/value matches {/openEHR-EHR-CLUSTER\.anatomical_location(-a-zA-Z0-9_]+)*\.v1/} - } - ELEMENT[id19] occurrences matches {0..1} matches { -- Exposure Description - value matches { - DV_TEXT[id62] - } - } - ELEMENT[id21] occurrences matches {0..1} matches { -- Earliest Exposure - value matches { - DV_DATE_TIME[id63] - } - } - ELEMENT[id26] occurrences matches {0..1} matches { -- Duration of Exposure - value matches { - DV_DURATION[id64] - } - } - allow_archetype CLUSTER[id20] matches { -- Additional Exposure Detail - include - archetype_id/value matches {/openEHR-EHR-CLUSTER\.amount(-a-zA-Z0-9_]+)*\.v1|openEHR-EHR-CLUSTER\.medication_admin(-a-zA-Z0-9_]+)*\.v1|openEHR-EHR-CLUSTER\.timing(-a-zA-Z0-9_]+)*\.v1/} - } - ELEMENT[id41] occurrences matches {0..1} matches { -- Clinical Management Description - value matches { - DV_TEXT[id65] - } - } - ELEMENT[id32] matches { -- Multimedia - value matches { - DV_MULTIMEDIA[id66] matches { - media_type - } - } - } - allow_archetype CLUSTER[id42] matches { -- Reporting Details - include - archetype_id/value matches {/.*/} - } - ELEMENT[id33] occurrences matches {0..1} matches { -- Reaction Comment - value matches { - DV_TEXT[id67] - } - } - } - } - } - } - } - protocol matches { - ITEM_TREE[id43] matches { - items matches { - ELEMENT[id45] occurrences matches {0..1} matches { -- Reaction Reported? - value matches { - DV_BOOLEAN[id68] matches { - value matches {True, False} - } - } - } - ELEMENT[id49] occurrences matches {0..1} matches { -- Report Comment - value matches { - DV_TEXT[id69] - } - } - ELEMENT[id46] matches { -- Adverse Reaction Report - value matches { - DV_URI[id70] - } - } - ELEMENT[id48] occurrences matches {0..1} matches { -- Supporting Clinical Record Information - value matches { - DV_EHR_URI[id71] - } - } - } - } - } - } diff --git a/tests/examplefiles/example2.aspx b/tests/examplefiles/example2.aspx deleted file mode 100644 index 52b7c001..00000000 --- a/tests/examplefiles/example2.aspx +++ /dev/null @@ -1,29 +0,0 @@ -<%@ Register TagPrefix="Acme" TagName="Message" Src="userctrl2_vb.ascx" %> - - - - - - - -

A Simple User Control w/ Properties

- -
- - - -

- - - - - - - diff --git a/tests/examplefiles/example2.cpp b/tests/examplefiles/example2.cpp deleted file mode 100644 index ccd99383..00000000 --- a/tests/examplefiles/example2.cpp +++ /dev/null @@ -1,20 +0,0 @@ -/* - * A Test file for the different string literals. - */ - -#include - -int main() { - char *_str = "a normal string"; - wchar_t *L_str = L"a wide string"; - char *u8_str = u8"utf-8 string"; - char16_t *u_str = u"utf-16 string"; - char32_t *U_str = U"utf-32 string"; - char *R_str = R""""(raw string with -""" -as a delimiter)""""; - - std::cout << R_str << std::endl; - - return 0; -} diff --git a/tests/examplefiles/example2.msc b/tests/examplefiles/example2.msc deleted file mode 100644 index 61e2ef83..00000000 --- a/tests/examplefiles/example2.msc +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/mscgen -Tpng -# -# testinput2.msc : Sample msc input file with URLs -# -# This file is PUBLIC DOMAIN and may be freely reproduced, distributed, -# transmitted, used, modified, built upon, or otherwise exploited by -# anyone for any purpose, commercial or non-commercial, and in any way, -# including by methods that have not yet been invented or conceived. -# -# This file is provided "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER -# EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -# - -# Note: This is from mscgen-0.20 - -msc { - -A,B; - ---- [label="Start", ID="1"]; - -A->B [label="signal"]; -A<-B [label="signal"]; - - -A=>B [label="method"]; -A<=B [label="method"]; - -A>>B [label="return"]; -A<>B [label="call-back"]; -A<<=B [label="call-back", URL="www.google.com"]; - -A x- B [label="loss"]; -A -x B [label="loss"]; - ---- [label="Left arcs", ID="2", IDURL="www.google.co.uk"]; - -A->A [label="signal"]; -A<-A [label="signal"]; - - -A=>A [label="method"]; -A<=A [label="method"]; - -A>>A [label="return"]; -A<>A [label="call-back"]; -A<<=A [label="call-back", URL="www.google.com", ID="3"]; - -A x- A [label="loss"]; -A -x A [label="loss"]; - ---- [label="Right arcs"]; - -B->B [label="signal"]; -B<-B [label="signal"]; - - -B=>B [label="method"]; -B<=B [label="method"]; - -B>>B [label="return"]; -B<>B [label="call-back", ID="4"]; -B<<=B [label="call-back", URL="www.google.com"]; - -B x- B [label="loss"]; -B -x B [label="loss"]; - ---- [label="End of arcs", URL="www.google.com"]; - - -... [label="Some time passes", URL="www.google.com"]; -} diff --git a/tests/examplefiles/exampleScript.cfc b/tests/examplefiles/exampleScript.cfc deleted file mode 100644 index 002acbcd..00000000 --- a/tests/examplefiles/exampleScript.cfc +++ /dev/null @@ -1,241 +0,0 @@ - -/** -******************************************************************************** -ContentBox - A Modular Content Platform -Copyright 2012 by Luis Majano and Ortus Solutions, Corp -www.gocontentbox.org | www.luismajano.com | www.ortussolutions.com -******************************************************************************** -Apache License, Version 2.0 - -Copyright Since [2012] [Luis Majano and Ortus Solutions,Corp] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -******************************************************************************** -* A generic content service for content objects -*/ -component extends="coldbox.system.orm.hibernate.VirtualEntityService" singleton{ - - // DI - property name="settingService" inject="id:settingService@cb"; - property name="cacheBox" inject="cachebox"; - property name="log" inject="logbox:logger:{this}"; - property name="customFieldService" inject="customFieldService@cb"; - property name="categoryService" inject="categoryService@cb"; - property name="commentService" inject="commentService@cb"; - property name="contentVersionService" inject="contentVersionService@cb"; - property name="authorService" inject="authorService@cb"; - property name="populator" inject="wirebox:populator"; - property name="systemUtil" inject="SystemUtil@cb"; - - /* - * Constructor - * @entityName.hint The content entity name to bind this service to. - */ - ContentService function init(entityName="cbContent"){ - // init it - super.init(entityName=arguments.entityName, useQueryCaching=true); - - // Test scope coloring in pygments - this.colorTestVar = "Just for testing pygments!"; - cookie.colorTestVar = ""; - client.colorTestVar = "" - session.colorTestVar = ""; - application.colorTestVar = ""; - - return this; - } - - /** - * Clear all content caches - * @async.hint Run it asynchronously or not, defaults to false - */ - function clearAllCaches(boolean async=false){ - var settings = settingService.getAllSettings(asStruct=true); - // Get appropriate cache provider - var cache = cacheBox.getCache( settings.cb_content_cacheName ); - cache.clearByKeySnippet(keySnippet="cb-content",async=arguments.async); - return this; - } - - /** - * Clear all page wrapper caches - * @async.hint Run it asynchronously or not, defaults to false - */ - function clearAllPageWrapperCaches(boolean async=false){ - var settings = settingService.getAllSettings(asStruct=true); - // Get appropriate cache provider - var cache = cacheBox.getCache( settings.cb_content_cacheName ); - cache.clearByKeySnippet(keySnippet="cb-content-pagewrapper",async=arguments.async); - return this; - } - - /** - * Clear all page wrapper caches - * @slug.hint The slug partial to clean on - * @async.hint Run it asynchronously or not, defaults to false - */ - function clearPageWrapperCaches(required any slug, boolean async=false){ - var settings = settingService.getAllSettings(asStruct=true); - // Get appropriate cache provider - var cache = cacheBox.getCache( settings.cb_content_cacheName ); - cache.clearByKeySnippet(keySnippet="cb-content-pagewrapper-#arguments.slug#",async=arguments.async); - return this; - } - - /** - * Clear a page wrapper cache - * @slug.hint The slug to clean - * @async.hint Run it asynchronously or not, defaults to false - */ - function clearPageWrapper(required any slug, boolean async=false){ - var settings = settingService.getAllSettings(asStruct=true); - // Get appropriate cache provider - var cache = cacheBox.getCache( settings.cb_content_cacheName ); - cache.clear("cb-content-pagewrapper-#arguments.slug#/"); - return this; - } - - /** - * Searches published content with cool paramters, remember published content only - * @searchTerm.hint The search term to search - * @max.hint The maximum number of records to paginate - * @offset.hint The offset in the pagination - * @asQuery.hint Return as query or array of objects, defaults to array of objects - * 
@sortOrder.hint The sorting of the search results, defaults to publishedDate DESC - * @isPublished.hint Search for published, non-published or both content objects [true, false, 'all'] - * @searchActiveContent.hint Search only content titles or both title and active content. Defaults to both. - */ - function searchContent( - any searchTerm="", - numeric max=0, - numeric offset=0, - boolean asQuery=false, - any sortOrder="publishedDate DESC", - any isPublished=true, - boolean searchActiveContent=true){ - - var results = {}; - var c = newCriteria(); - - // only published content - if( isBoolean( arguments.isPublished ) ){ - // Published bit - c.isEq( "isPublished", javaCast( "Boolean", arguments.isPublished ) ); - // Published eq true evaluate other params - if( arguments.isPublished ){ - c.isLt("publishedDate", now() ) - .$or( c.restrictions.isNull("expireDate"), c.restrictions.isGT("expireDate", now() ) ) - .isEq("passwordProtection",""); - } - } - - // Search Criteria - if( len( arguments.searchTerm ) ){ - // like disjunctions - c.createAlias("activeContent","ac"); - // Do we search title and active content or just title? - if( arguments.searchActiveContent ){ - c.$or( c.restrictions.like("title","%#arguments.searchTerm#%"), - c.restrictions.like("ac.content", "%#arguments.searchTerm#%") ); - } - else{ - c.like( "title", "%#arguments.searchTerm#%" ); - } - } - - // run criteria query and projections count - results.count = c.count( "contentID" ); - results.content = c.resultTransformer( c.DISTINCT_ROOT_ENTITY ) - .list(offset=arguments.offset, max=arguments.max, sortOrder=arguments.sortOrder, asQuery=arguments.asQuery); - - return results; - } - -/********************************************* PRIVATE *********************************************/ - - - /** - * Update the content hits - * @contentID.hint The content id to update - */ - private function syncUpdateHits(required contentID){ - var q = new Query(sql="UPDATE cb_content SET hits = hits + 1 WHERE contentID = #arguments.contentID#").execute(); - return this; - } - - - private function closureTest(){ - methodCall( - param1, - function( arg1, required arg2 ){ - var settings = settingService.getAllSettings(asStruct=true); - // Get appropriate cache provider - var cache = cacheBox.getCache( settings.cb_content_cacheName ); - cache.clear("cb-content-pagewrapper-#arguments.slug#/"); - return this; - }, - param1 - ); - } - - private function StructliteralTest(){ - return { - foo = bar, - brad = 'Wood', - func = function( arg1, required arg2 ){ - var settings = settingService.getAllSettings(asStruct=true); - // Get appropriate cache provider - var cache = cacheBox.getCache( settings.cb_content_cacheName ); - cache.clear("cb-content-pagewrapper-#arguments.slug#/"); - return this; - }, - array = [ - 1, - 2, - 3, - 4, - 5, - 'test', - 'testing', - 'testerton', - { - foo = true, - brad = false, - wood = null - } - ], - last = "final" - }; - } - - private function arrayliteralTest(){ - return [ - 1, - 2, - 3, - 4, - 5, - 'test', - 'testing', - 'testerton', - { - foo = true, - brad = false, - wood = null - }, - 'testy-von-testavich' - ]; - } - -} - \ No newline at end of file diff --git a/tests/examplefiles/exampleTag.cfc b/tests/examplefiles/exampleTag.cfc deleted file mode 100644 index 753bb826..00000000 --- a/tests/examplefiles/exampleTag.cfc +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tests/examplefiles/example_coq.v b/tests/examplefiles/example_coq.v deleted file mode 100644 
index fd1a7bc8..00000000 --- a/tests/examplefiles/example_coq.v +++ /dev/null @@ -1,4 +0,0 @@ -Lemma FalseLemma : False <-> False. -tauto. -Qed. -Check FalseLemma. diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex deleted file mode 100644 index ddca7f60..00000000 --- a/tests/examplefiles/example_elixir.ex +++ /dev/null @@ -1,233 +0,0 @@ -# Numbers -0b0101011 -1234 ; 0x1A ; 0xbeef ; 0763 ; 0o123 -3.14 ; 5.0e21 ; 0.5e-12 -100_000_000 - -# these are not valid numbers -0b012 ; 0xboar ; 0o888 -0B01 ; 0XAF ; 0O123 - -# Characters -?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?, -?\x{12} ; ?\x{abcd} -?\x34 ; ?\xF - -# these show that only the first digit is part of the character -?\123 ; ?\12 ; ?\7 - -# Atoms -:this ; :that -:'complex atom' -:"with' \"\" 'quotes" -:" multi - line ' \s \123 \xff -atom" -:... ; :<<>> ; :%{} ; :% ; :{} -:++; :--; :*; :~~~; ::: -:% ; :. ; :<- - -# Strings -"Hello world" -"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\\s\z\+ \\ escapes" -"Quotes ' inside \" \123 the \"\" \xF \\xF string \\\" end" -"Multiline - string" - -# Char lists -'this is a list' -'escapes \' \t \\\'' -'Multiline - char - list -' - -# Binaries -<<1, 2, 3>> -<<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "hello™1" - -# Sigils -~r/this + i\s "a" regex/ -~R'this + i\s "a" regex too' -~w(hello #{ ["has" <> "123", '\c\d', "\123 interpol" | []] } world)s -~W(hello #{no "123" \c\d \123 interpol} world)s - -~s{Escapes terminators \{ and \}, but no {balancing} # outside of sigil here } - -~S"No escapes \s\t\n and no #{interpolation}" - -:"atoms work #{"to" <> "o"}" - -# Operators -x = 1 + 2.0 * 3 -y = true and false; z = false or true -... = 144 -... == !x && y || z -"hello" |> String.upcase |> String.downcase() -{^z, a} = {true, x} - -# Free operators (added in 1.0.0) -p ~>> f = bind(p, f) -p1 ~> p2 = pair_right(p1, p2) -p1 <~ p2 = pair_left(p1, p2) -p1 <~> p2 = pair_both(p1, p2) -p |~> f = map(p, f) -p1 <|> p2 = either(p1, p2) - -# Lists, tuples, maps, keywords -[1, :a, 'hello'] ++ [2, 3] -[:head | [?t, ?a, ?i, ?l]] - -{:one, 2.0, "three"} - -[...: "this", <<>>: "is", %{}: "a keyword", %: "list", {}: "too"] -["this is an atom too": 1, "so is this": 2] -[option: "value", key: :word] -[++: "operator", ~~~: :&&&] - -map = %{shortcut: "syntax"} -%{map | "update" => "me"} -%{ 12 => 13, :weird => ['thing'] } - -# Comprehensions -for x <- 1..10, x < 5, do: {x, x} -pixels = "12345678" -for << <> <- pixels >> do - [r, {g, %{"b" => a}}] -end - -# String interpolation -"String #{inspect "interpolation"} is quite #{1+4+7} difficult" - -# Identifiers -abc_123 = 1 -_018OP = 2 -A__0 == 3 - -# Modules -defmodule Long.Module.Name do - @moduledoc "Simple module docstring" - - @doc """ - Multiline docstring - "with quotes" - and #{ inspect %{"interpolation" => "in" <> "action"} } - now with #{ {:a, 'tuple'} } - and #{ inspect { - :tuple, - %{ with: "nested #{ inspect %{ :interpolation => %{} } }" } - } } - """ - defstruct [:a, :name, :height] - - @doc ~S''' - No #{interpolation} of any kind. - \000 \x{ff} - - \n #{\x{ff}} - ''' - def func(a, b \\ []), do: :ok - - @doc false - def __before_compile__(_) do - :ok - end -end - -# Structs -defmodule Second.Module do - s = %Long.Module.Name{name: "Silly"} - %Long.Module.Name{s | height: {192, :cm}} - ".. #{%Long.Module.Name{s | height: {192, :cm}}} .." 
-end - -# Types, pseudo-vars, attributes -defmodule M do - @custom_attr :some_constant - - @before_compile Long.Module.Name - - @typedoc "This is a type" - @type typ :: integer - - @typedoc """ - Another type - """ - @opaque typtyp :: 1..10 - - @spec func(typ, typtyp) :: :ok | :fail - def func(a, b) do - a || b || :ok || :fail - Path.expand("..", __DIR__) - IO.inspect __ENV__ - __NOTAPSEUDOVAR__ = 11 - __MODULE__.func(b, a) - end - - defmacro m() do - __CALLER__ - end -end - -# Functions -anon = fn x, y, z -> - fn(a, b, c) -> - &(x + y - z * a / &1 + b + div(&2, c)) - end -end - -&Set.put(&1, &2) ; & Set.put(&1, &2) ; &( Set.put(&1, &1) ) - -# Function calls -anon.(1, 2, 3); self; hd([1,2,3]) -Kernel.spawn(fn -> :ok end) -IO.ANSI.black - -# Control flow -if :this do - :that -else - :otherwise -end - -pid = self -receive do - {:EXIT, _} -> :done - {^pid, :_} -> nil - after 100 -> :no_luck -end - -case __ENV__.line do - x when is_integer(x) -> x - x when x in 1..12 -> -x -end - -cond do - false -> "too bad" - 4 > 5 -> "oops" - true -> nil -end - -# Lexical scope modifiers -import Kernel, except: [spawn: 1, +: 2, /: 2, Unless: 2] -alias Long.Module.Name, as: N0men123_and4 -use Bitwise - -4 &&& 5 -2 <<< 3 - -# Protocols -defprotocol Useless do - def func1(this) - def func2(that) -end - -defimpl Useless, for: Atom do -end - -# Exceptions -defmodule NotAnError do - defexception [:message] -end - -raise NotAnError, message: "This is not an error" diff --git a/tests/examplefiles/example_file.fy b/tests/examplefiles/example_file.fy deleted file mode 100644 index 43e80c1d..00000000 --- a/tests/examplefiles/example_file.fy +++ /dev/null @@ -1,128 +0,0 @@ -class Person { - def initialize: @name age: @age { - """ - This is a docstring for the Person constructor method. - Docstrings usually are multi-line, like this one. - """ - } - - def to_s { - # return is optional in this case, but we use it nontheless - return "Person with name: #{@name inspect} and age: #{@age}" - } -} - -class PersonWithCity : Person { - def initialize: @name age: @age city: @city { - } - - def to_s { - super to_s ++ " living in: #{@city inspect}" - } -} - -p1 = Person new: "Johnny Jackson" age: 42 -p1 println # prints: Person with name: "Johnny Jackson" and age: 42 - -p2 = PersonWithCity new: "John Appleseed" age: 55 city: "New York" -p2 println # prints: Person with name: "John Appleseed" age: 55 living in: "New York" - -array = [1,2,3, "foo", 'bar] -hash = <['foo => "bar", 'bar => 42]> -tuple = (1,2,"hello","world") -block = |x, y| { - x + y println -} -block call: [4,2] - -0b010101 & 0b00101 to_s: 2 . println -0xFF & 0xAB to_s: 16 . println -0o77 > 0o76 println -123.123 + 0.222 println - -x = 0 -try { - 10 / x println -} catch ZeroDivisionError => e { - x = 3 - retry -} finally { - "Finally, done!" println -} - -def a_method: arg1 with_default_arg: arg2 (42) { - arg1 * arg2 println -} - -a_method: 42 -a_method: 42 with_default_arg: 85 - -class ClassWithClassMethod { - def self class_method1 { - 'works - } - - def ClassWithClassMethod class_method2 { - 'this_as_well - } -} - -ClassWithClassMethod class_method1 println -ClassWithClassMethod class_method2 println - -def another_method: block { - 1 upto: 10 . map: block -} - -# local returns -another_method: |x| { return_local x * 2 } . inspect println - - -# pattern matching: -class PatternMatching { - def match_it: obj { - match obj { - case String -> "It's a String!" println - case Fixnum -> "It's a Number!" println - case _ -> "Aything else!" 
println - } - } - - def match_with_extract: str { - match str { - # m holds the MatchData object, m1 & m2 the first and second matches - case /^(.*) : (.*)$/ -> |m, m1, m2| - "First match: #{m1}" println - "Second match: #{m2}" println - } - } -} - -pm = PatternMatching new -pm match_it: "foo" -pm match_it: 42 -pm match_it: 'foo - -pm match_with_extract: "Hello : World!" - - -# calling ruby methods: -[3, 2, 1] reverse() each() |a| { puts(a) } -"Hello" sub("ll", "y") println -[3, 2, 1] map() |a| { a * 2 } inject(0) |s i| { s + i } println - -# test symbol highlighting -['foo] -['foo?!] -{'foo} -{'foo!?} -{'foo:bar?!=&/:} -('foo) - -# future sends -42 @ to_s class println -42 @ to_s: 16 . value println - -# async sends -42 @@ println -42 @@ upto: 100 diff --git a/tests/examplefiles/ezhil_primefactors.n b/tests/examplefiles/ezhil_primefactors.n deleted file mode 100644 index 13390611..00000000 --- a/tests/examplefiles/ezhil_primefactors.n +++ /dev/null @@ -1,152 +0,0 @@ -# (C) முத்தையா அண்ணாமலை 2013 -# (A) என். சொக்கன் -# எழில் தமிழ் நிரலாக்க மொழி உதாரணம் -# Muthu A granted permission for this to be included under the BSD license -# https://bitbucket.org/birkenfeld/pygments-main/pull-requests/443/ezhil-language-lexer-for-pygments/diff - -## Prime Factors Example -## பகா எண் கூறுகளைக் கண்டறியும் உதாரணம் - -## இது நிரல் தரப்பட்ட எண்ணின் பகாஎண் கூறுகளைக் கண்டறியும் - -நிரல்பாகம் பகாஎண்ணா(எண்1) - - ## இது நிரல்பாகம் தரப்பட்ட எண் பகு எண்ணா அல்லது பகா எண்ணா என்று கண்டறிந்து சொல்லும் - ## பகுஎண் என்றால் 0 திரும்பத் தரப்படும் - ## பகாஎண் என்றால் 1 திரும்பத் தரப்படும் - - @(எண்1 < 0) ஆனால் - - ## எதிர்மறை எண்களை நேராக்குதல் - - எண்1 = எண்1 * (-1) - - முடி - - @(எண்1 < 2) ஆனால் - - ## பூஜ்ஜியம், ஒன்று ஆகியவை பகா எண்கள் அல்ல - - பின்கொடு 0 - - முடி - - @(எண்1 == 2) ஆனால் - - ## இரண்டு என்ற எண் ஒரு பகா எண் - - பின்கொடு 1 - - முடி - - மீதம் = எண்1%2 - - @(மீதம் == 0) ஆனால் - - ## இரட்டைப்படை எண், ஆகவே, இது பகா எண் அல்ல - - பின்கொடு 0 - - முடி - - எண்1வர்க்கமூலம் = எண்1^0.5 - - @(எண்2 = 3, எண்2 <= எண்1வர்க்கமூலம், எண்2 = எண்2 + 2) ஆக - - மீதம்1 = எண்1%எண்2 - - @(மீதம்1 == 0) ஆனால் - - ## ஏதேனும் ஓர் எண்ணால் முழுமையாக வகுபட்டுவிட்டது, ஆகவே அது பகா எண் அல்ல - - பின்கொடு 0 - - முடி - - முடி - - பின்கொடு 1 - -முடி - -நிரல்பாகம் பகுத்தெடு(எண்1) - - ## இது எண் தரப்பட்ட எண்ணின் பகா எண் கூறுகளைக் கண்டறிந்து பட்டியல் இடும் - - கூறுகள் = பட்டியல்() - - @(எண்1 < 0) ஆனால் - - ## எதிர்மறை எண்களை நேராக்குதல் - - எண்1 = எண்1 * (-1) - - முடி - - @(எண்1 <= 1) ஆனால் - - ## ஒன்று அல்லது அதற்குக் குறைவான எண்களுக்குப் பகா எண் விகிதம் கண்டறியமுடியாது - - பின்கொடு கூறுகள் - - முடி - - @(பகாஎண்ணா(எண்1) == 1) ஆனால் - - ## தரப்பட்ட எண்ணே பகா எண்ணாக அமைந்துவிட்டால், அதற்கு அதுவே பகாஎண் கூறு ஆகும் - - பின்இணை(கூறுகள், எண்1) - பின்கொடு கூறுகள் - - முடி - - தாற்காலிகஎண் = எண்1 - - எண்2 = 2 - - @(எண்2 <= தாற்காலிகஎண்) வரை - - விடை1 = பகாஎண்ணா(எண்2) - மீண்டும்தொடங்கு = 0 - - @(விடை1 == 1) ஆனால் - - விடை2 = தாற்காலிகஎண்%எண்2 - - @(விடை2 == 0) ஆனால் - - ## பகா எண்ணால் முழுமையாக வகுபட்டுள்ளது, அதனைப் பட்டியலில் இணைக்கிறோம் - - பின்இணை(கூறுகள், எண்2) - தாற்காலிகஎண் = தாற்காலிகஎண்/எண்2 - - ## மீண்டும் இரண்டில் தொடங்கி இதே கணக்கிடுதலைத் தொடரவேண்டும் - - எண்2 = 2 - மீண்டும்தொடங்கு = 1 - - முடி - - முடி - - @(மீண்டும்தொடங்கு == 0) ஆனால் - - ## அடுத்த எண்ணைத் தேர்ந்தெடுத்துக் கணக்கிடுதலைத் தொடரவேண்டும் - - எண்2 = எண்2 + 1 - - முடி - - முடி - - பின்கொடு கூறுகள் - -முடி - -அ = int(உள்ளீடு("உங்களுக்குப் பிடித்த ஓர் எண்ணைத் தாருங்கள்: ")) - -பகாஎண்கூறுகள் = பட்டியல்() - -பகாஎண்கூறுகள் = பகுத்தெடு(அ) - -பதிப்பி "நீங்கள் தந்த எண்ணின் பகா எண் கூறுகள் 
இவை: ", பகாஎண்கூறுகள் diff --git a/tests/examplefiles/fennelview.fnl b/tests/examplefiles/fennelview.fnl deleted file mode 100644 index fd0fc648..00000000 --- a/tests/examplefiles/fennelview.fnl +++ /dev/null @@ -1,156 +0,0 @@ -;; A pretty-printer that outputs tables in Fennel syntax. -;; Loosely based on inspect.lua: http://github.com/kikito/inspect.lua - -(local quote (fn [str] (.. '"' (: str :gsub '"' '\\"') '"'))) - -(local short-control-char-escapes - {"\a" "\\a" "\b" "\\b" "\f" "\\f" "\n" "\\n" - "\r" "\\r" "\t" "\\t" "\v" "\\v"}) - -(local long-control-char-esapes - (let [long {}] - (for [i 0 31] - (let [ch (string.char i)] - (when (not (. short-control-char-escapes ch)) - (tset short-control-char-escapes ch (.. "\\" i)) - (tset long ch (: "\\%03d" :format i))))) - long)) - -(fn escape [str] - (let [str (: str :gsub "\\" "\\\\") - str (: str :gsub "(%c)%f[0-9]" long-control-char-esapes)] - (: str :gsub "%c" short-control-char-escapes))) - -(fn sequence-key? [k len] - (and (= (type k) "number") - (<= 1 k) - (<= k len) - (= (math.floor k) k))) - -(local type-order {:number 1 :boolean 2 :string 3 :table 4 - :function 5 :userdata 6 :thread 7}) - -(fn sort-keys [a b] - (let [ta (type a) tb (type b)] - (if (and (= ta tb) (~= ta "boolean") - (or (= ta "string") (= ta "number"))) - (< a b) - (let [dta (. type-order a) - dtb (. type-order b)] - (if (and dta dtb) - (< dta dtb) - dta true - dtb false - :else (< ta tb)))))) - -(fn get-sequence-length [t] - (var len 1) - (each [i (ipairs t)] (set len i)) - len) - -(fn get-nonsequential-keys [t] - (let [keys {} - sequence-length (get-sequence-length t)] - (each [k (pairs t)] - (when (not (sequence-key? k sequence-length)) - (table.insert keys k))) - (table.sort keys sort-keys) - (values keys sequence-length))) - -(fn count-table-appearances [t appearances] - (if (= (type t) "table") - (when (not (. appearances t)) - (tset appearances t 1) - (each [k v (pairs t)] - (count-table-appearances k appearances) - (count-table-appearances v appearances))) - (when (and t (= t t)) ; no nans please - (tset appearances t (+ (or (. appearances t) 0) 1)))) - appearances) - - - -(var put-value nil) ; mutual recursion going on; defined below - -(fn puts [self ...] - (each [_ v (ipairs [...])] - (table.insert self.buffer v))) - -(fn tabify [self] (puts self "\n" (: self.indent :rep self.level))) - -(fn already-visited? [self v] (~= (. self.ids v) nil)) - -(fn get-id [self v] - (var id (. self.ids v)) - (when (not id) - (let [tv (type v)] - (set id (+ (or (. self.max-ids tv) 0) 1)) - (tset self.max-ids tv id) - (tset self.ids v id))) - (tostring id)) - -(fn put-sequential-table [self t length] - (puts self "[") - (set self.level (+ self.level 1)) - (for [i 1 length] - (puts self " ") - (put-value self (. t i))) - (set self.level (- self.level 1)) - (puts self " ]")) - -(fn put-key [self k] - (if (and (= (type k) "string") - (: k :find "^[-%w?\\^_`!#$%&*+./@~:|<=>]+$")) - (puts self ":" k) - (put-value self k))) - -(fn put-kv-table [self t] - (puts self "{") - (set self.level (+ self.level 1)) - (each [k v (pairs t)] - (tabify self) - (put-key self k) - (puts self " ") - (put-value self v)) - (set self.level (- self.level 1)) - (tabify self) - (puts self "}")) - -(fn put-table [self t] - (if (already-visited? self t) - (puts self "#") - (>= self.level self.depth) - (puts self "{...}") - :else - (let [(non-seq-keys length) (get-nonsequential-keys t) - id (get-id self t)] - (if (> (. 
self.appearances t) 1) - (puts self "#<" id ">") - (and (= (# non-seq-keys) 0) (= (# t) 0)) - (puts self "{}") - (= (# non-seq-keys) 0) - (put-sequential-table self t length) - :else - (put-kv-table self t))))) - -(set put-value (fn [self v] - (let [tv (type v)] - (if (= tv "string") - (puts self (quote (escape v))) - (or (= tv "number") (= tv "boolean") (= tv "nil")) - (puts self (tostring v)) - (= tv "table") - (put-table self v) - :else - (puts self "#<" (tostring v) ">"))))) - - - -(fn fennelview [root options] - (let [options (or options {}) - inspector {:appearances (count-table-appearances root {}) - :depth (or options.depth 128) - :level 0 :buffer {} :ids {} :max-ids {} - :indent (or options.indent " ")}] - (put-value inspector root) - (table.concat inspector.buffer))) diff --git a/tests/examplefiles/fibonacci.tokigun.aheui b/tests/examplefiles/fibonacci.tokigun.aheui deleted file mode 100644 index afa2ca05..00000000 --- a/tests/examplefiles/fibonacci.tokigun.aheui +++ /dev/null @@ -1,4 +0,0 @@ -바싹반박나싼순 -뿌멓떠벌번멍뻐 -쌀삭쌀살다순옭 -어어선썬설썩옭 diff --git a/tests/examplefiles/firefox.mak b/tests/examplefiles/firefox.mak deleted file mode 100644 index 4dc0f167..00000000 --- a/tests/examplefiles/firefox.mak +++ /dev/null @@ -1,586 +0,0 @@ -# -# ***** BEGIN LICENSE BLOCK ***** -# Version: MPL 1.1/GPL 2.0/LGPL 2.1 -# -# The contents of this file are subject to the Mozilla Public License Version -# 1.1 (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# http://www.mozilla.org/MPL/ -# -# Software distributed under the License is distributed on an "AS IS" basis, -# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License -# for the specific language governing rights and limitations under the -# License. -# -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# -# Alternatively, the contents of this file may be used under the terms of -# either the GNU General Public License Version 2 or later (the "GPL"), or -# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), -# in which case the provisions of the GPL or the LGPL are applicable instead -# of those above. If you wish to allow use of your version of this file only -# under the terms of either the GPL or the LGPL, and not to allow others to -# use your version of this file under the terms of the MPL, indicate your -# decision by deleting the provisions above and replace them with the notice -# and other provisions required by the GPL or the LGPL. If you do not delete -# the provisions above, a recipient may use your version of this file under -# the terms of any one of the MPL, the GPL or the LGPL. -# -# ***** END LICENSE BLOCK ***** - -DEPTH = . -topsrcdir = @top_srcdir@ -srcdir = @srcdir@ -VPATH = @srcdir@ - -include $(DEPTH)/config/autoconf.mk - -include $(topsrcdir)/build/unix/modules.mk - -ifeq ($(BUILD_MODULES),all) -# -# And now for something completely different... -# Divide the default build into tiers. 
-# Tiers must be defined on module boundaries -# -SUPPRESS_DEFAULT_RULES = 1 - -default alldep all:: $(SUBMAKEFILES) - $(RM) -rf $(DIST)/sdk - $(RM) -rf $(DIST)/include - $(MAKE) -C config export - $(MAKE) nspr - $(MAKE) ldap - $(MAKE) tier_0 - $(MAKE) tier_1 - $(MAKE) tier_2 - $(MAKE) tier_9 - $(MAKE) tier_50 - $(MAKE) tier_99 - -# Make sure that the existing rulesets work -DIRS = \ - $(tier_0_dirs) \ - $(tier_1_dirs) \ - $(tier_2_dirs) \ - $(tier_9_dirs) \ - $(tier_50_dirs) \ - $(NULL) - -ifdef GC_LEAK_DETECTOR -DIRS += gc/boehm -endif - -DIRS += $(tier_99_dirs) - -# -# tier 0 - base build config dirs -# -tier_0_dirs = \ - config \ - build \ - $(NULL) - -# -# tier 1 - 3rd party individual libraries -# -tier_1_dirs += dbm - -ifndef MOZ_NATIVE_JPEG -tier_1_dirs += jpeg -endif - -ifndef MOZ_NATIVE_ZLIB -tier_1_dirs += modules/zlib -endif - -# Installer needs standalone libjar, hence standalone zlib -ifdef MOZ_INSTALLER -tier_1_dirs += modules/zlib/standalone -endif - -ifdef MOZ_UPDATER -tier_1_dirs += modules/libbz2 -tier_1_dirs += modules/libmar -endif - -ifdef MOZ_SVG_RENDERER_LIBART -tier_1_dirs += other-licenses/libart_lgpl -endif - -# -# tier 2 - base libraries -# -tier_2_dirs = \ - js \ - xpcom \ - $(NULL) - -ifndef MOZ_NO_XPCOM_OBSOLETE -tier_2_dirs += modules/libreg xpcom/obsolete -endif - -ifdef NS_TRACE_MALLOC -tier_2_dirs += tools/trace-malloc/lib -endif - -# -# tier 9 - core components (necko,gecko) -# - -tier_9_dirs += \ - js/src/xpconnect \ - intl \ - db \ - $(NULL) - -ifdef MOZ_STORAGE -tier_9_dirs += storage -endif - -ifdef MOZ_ENABLE_XLIB -tier_9_dirs += gfx/src/xlibrgb widget/src/xlibxtbin -endif - -ifdef MOZ_ENABLE_GTK -tier_9_dirs += widget/src/gtksuperwin widget/src/gtkxtbin -endif - -ifdef MOZ_ENABLE_GTK2 -tier_9_dirs += widget/src/gtkxtbin -endif - -ifdef MOZ_IPCD -tier_9_dirs += ipc/ipcd -endif - -ifdef MOZ_JSDEBUGGER -tier_9_dirs += js/jsd -endif - -tier_9_dirs += \ - modules/libutil \ - netwerk \ - modules/libjar \ - uriloader \ - modules/libpref \ - modules/libimg \ - caps \ - rdf \ - parser/expat \ - parser/xml \ - parser/htmlparser \ - gfx \ - modules/libpr0n \ - sun-java \ - modules/plugin \ - dom \ - view \ - widget \ - content \ - layout \ - xpfe/components/shistory \ - docshell \ - webshell \ - embedding \ - editor \ - xpfe/appshell \ - $(NULL) - -ifdef MOZ_OJI -tier_9_dirs += \ - js/src/liveconnect \ - modules/oji \ - $(NULL) -endif - -ifdef ACCESSIBILITY -tier_9_dirs += accessible -endif - -# -# tier 50 - xpfe & toolkit -# - -ifdef MOZ_XUL -ifdef MOZ_XUL_APP -tier_50_dirs += chrome -else -tier_50_dirs += rdf/chrome -endif -else -tier_50_dirs += embedding/minimo/chromelite -endif - -tier_50_dirs += profile - -# This must preceed xpfe -ifdef MOZ_JPROF -tier_50_dirs += tools/jprof -endif - -ifneq (,$(filter mac cocoa,$(MOZ_WIDGET_TOOLKIT))) -tier_50_dirs += xpfe/bootstrap/appleevents -endif - -tier_50_dirs += \ - xpfe \ - toolkit/components \ - $(NULL) - -ifndef MOZ_XUL_APP -tier_50_dirs += themes -endif - -ifdef MOZ_ENABLE_XREMOTE -tier_50_dirs += widget/src/xremoteclient -endif - -ifdef MOZ_XUL_APP -tier_50_dirs += toolkit -endif - -ifdef MOZ_PHOENIX -#XXXBlake this shell path is a temp hack; toolkit shouldn't depend on browser -tier_50_dirs += browser/components/shell/public -endif - -ifdef MOZ_XPINSTALL -tier_50_dirs += xpinstall -endif - -# JavaXPCOM JNI code is compiled into libXUL -ifdef MOZ_JAVAXPCOM -tier_50_dirs += extensions/java/xpcom/src -endif - -ifdef MOZ_ENABLE_LIBXUL -tier_50_dirs += \ - toolkit/library \ - xpcom/stub \ - $(NULL) -endif - 
-ifdef NS_TRACE_MALLOC -tier_50_dirs += tools/trace-malloc -endif - -ifdef MOZ_PSM -tier_50_dirs += security/manager -else -tier_50_dirs += security/manager/boot/public security/manager/ssl/public -endif - -ifdef MOZ_LDAP_XPCOM -tier_50_dirs += directory/xpcom -endif - -ifndef MINIMO -ifdef MOZ_XUL_APP -ifdef MOZ_ENABLE_GTK2 -tier_50_dirs += toolkit/components/gnome -endif -endif -endif - -ifdef MOZ_LEAKY -tier_50_dirs += tools/leaky -endif - -ifdef MOZ_MAPINFO -tier_50_dirs += tools/codesighs -endif - -# -# tier 99 - application features -# - -ifdef MOZ_MAIL_NEWS -tier_99_dirs += mailnews -endif - -ifdef MOZ_CALENDAR -tier_99_dirs += calendar -endif - -ifdef MOZ_EXTENSIONS -tier_99_dirs += extensions -endif - -ifdef MOZ_JAVAXPCOM -tier_99_dirs += extensions/java -endif - -# axcontrol -ifeq ($(OS_ARCH),WINNT) -ifndef MOZ_NO_ACTIVEX_SUPPORT -tier_99_dirs += \ - embedding/browser/activex/src/control \ - embedding/browser/activex/src/control_kicker \ - $(NULL) -endif -endif - -# Java Embedding Plugin -ifneq (,$(filter mac cocoa,$(MOZ_WIDGET_TOOLKIT))) -tier_99_dirs += plugin/oji/JEP -endif - -ifneq (,$(filter browser suite,$(MOZ_BUILD_APP))) -tier_99_dirs += xpfe/components/search -endif - -ifdef MOZ_BRANDING_DIRECTORY -tier_99_dirs += $(MOZ_BRANDING_DIRECTORY) -endif - -ifdef MOZ_PHOENIX -tier_99_dirs += browser xpfe/bootstrap/init.d -endif - -ifdef MOZ_XULRUNNER -tier_99_dirs += xulrunner -endif - -ifdef MOZ_COMPOSER -tier_99_dirs += editor/ui -endif - -ifdef MOZ_THUNDERBIRD -tier_99_dirs += mail xpfe/bootstrap/init.d -endif - -ifdef MOZ_STANDALONE_COMPOSER -tier_99_dirs += composer -endif - -ifdef MOZ_SUNBIRD -tier_99_dirs += calendar/sunbird -endif - -ifdef MOZ_SUITE -tier_99_dirs += suite -endif - -ifdef MINIMO -tier_99_dirs += minimo -endif - -ifdef MOZ_XUL_APP -ifdef MOZ_INSTALLER -tier_99_dirs += toolkit/mozapps/installer -endif -else -ifneq (,$(MOZ_XPFE_COMPONENTS)$(MOZ_XUL)) -ifndef MINIMO -tier_99_dirs += xpfe/bootstrap -endif -endif -endif - -ifneq (,$(MOZ_ENABLE_GTK)$(MOZ_ENABLE_GTK2)) -tier_99_dirs += embedding/browser/gtk -endif - -# viewer -ifneq (,$(ENABLE_TESTS)) -ifndef MOZ_ENABLE_LIBXUL -tier_99_dirs += webshell/tests -endif -endif - -# winembed, mfcembed -ifeq ($(OS_ARCH),WINNT) -ifneq (,$(ENABLE_TESTS)$(MOZILLA_OFFICIAL)) -tier_99_dirs += embedding/tests -endif -endif - -# os2embed -ifeq ($(OS_ARCH),OS2) -ifneq (,$(ENABLE_TESTS)$(MOZILLA_OFFICIAL)) -tier_99_dirs += embedding/tests -endif -endif - -ifeq ($(MOZ_BUILD_APP),macbrowser) -tier_99_dirs += \ - embedding/config \ - camino \ - $(NULL) -endif - -# test harnesses -ifdef ENABLE_TESTS -tier_99_dirs += tools/test-harness -endif - -else - -# Standalone build - -DIRS = $(BUILD_MODULE_DIRS) - -# Hack to generate xpidl Makefile -ifneq ($(BUILD_MODULES),all) -ifneq (,$(findstring xpcom, $(BUILD_MODULE_DIRS))) -DIRS := xpcom/typelib $(DIRS) -SUBMAKEFILES := xpcom/typelib/Makefile -endif -endif - -default:: $(SUBMAKEFILES) - $(MAKE) export - $(MAKE) libs - -endif # BUILD_MODULES == all - -STATIC_MAKEFILES := nsprpub directory/c-sdk security/nss - -GARBAGE_DIRS += dist -DIST_GARBAGE = config.cache config.log config.status config-defs.h \ - dependencies.beos config/autoconf.mk config/myrules.mk config/myconfig.mk \ - unallmakefiles mozilla-config.h \ - $(topsrcdir)/.mozconfig.mk $(topsrcdir)/.mozconfig.out - -# Build pseudo-external modules first when export is explicitly called -export:: - $(RM) -rf $(DIST)/sdk - $(MAKE) -C config export - $(MAKE) nspr - $(MAKE) ldap -ifneq ($(BUILD_MODULES),all) -ifneq (,$(findstring xpcom, 
$(BUILD_MODULE_DIRS))) - $(MAKE) -C xpcom/typelib - $(MAKE) export-idl -endif -endif - -install:: -ifndef MOZ_NATIVE_NSPR - $(MAKE) -C nsprpub real_install DESTDIR=$(DESTDIR) libdir=$(mozappdir) includedir=$(includedir)/nspr - $(RM) -f $(addprefix $(DESTDIR)$(mozappdir)/$(LIB_PREFIX), $(addsuffix .$(LIB_SUFFIX), nspr4 plds4 plc4)) - $(RM) -f $(addprefix $(DESTDIR)$(bindir)/,nspr-config compile-et.pl prerr.properties) -endif -ifdef MOZ_LDAP_XPCOM - $(MAKE) -C directory/c-sdk real_install DESTDIR=$(DESTDIR) libdir=$(mozappdir) includedir=$(includedir)/ldap -endif - -include $(topsrcdir)/config/rules.mk - -# Clean up after pseudo-external modules -clean clobber realclean clobber_all distclean:: -ifndef MOZ_NATIVE_NSPR - $(MAKE) -C nsprpub $@ -endif -ifdef MOZ_LDAP_XPCOM - $(MAKE) -C directory/c-sdk $@ -endif - -# Map mozilla targets to standard automake target -ifdef MOZ_ENABLE_LIBXUL -tier_50: $(addsuffix /Makefile, $(filter-out $(STATIC_MAKEFILES), $($@_dirs))) - @echo "tier_50: $(tier_50_dirs)" - @$(EXIT_ON_ERROR) \ - for d in $(tier_50_dirs); do \ - $(UPDATE_TITLE) \ - if test ! -f $$d/Makefile; then \ - $(PERL) $(AUTOCONF_TOOLS)/make-makefile -t $(topsrcdir) -d $(DEPTH) $(CYGWIN_TOPSRCDIR) $$d/Makefile; \ - fi; \ - $(MAKE) -C $$d export; \ - done ; \ - for d in $(tier_50_dirs); do \ - $(UPDATE_TITLE) \ - $(MAKE) -C $$d libs; \ - done - @echo "Building tools from tier 2/9/50" - @$(EXIT_ON_ERROR) \ - for d in $(tier_2_dirs) $(tier_9_dirs) $(tier_50_dirs); do \ - $(UPDATE_TITLE) \ - $(MAKE) -C $$d tools; \ - done; -endif - -tier_%: - @echo "$@: $($@_dirs)" - @$(EXIT_ON_ERROR) \ - for d in $($@_dirs); do \ - $(UPDATE_TITLE) \ - if test ! -f $$d/Makefile; then \ - $(PERL) $(AUTOCONF_TOOLS)/make-makefile -t $(topsrcdir) -d $(DEPTH) $(CYGWIN_TOPSRCDIR) $$d/Makefile; \ - fi; \ - $(MAKE) -C $$d export; \ - done ; \ - for d in $($@_dirs); do $(UPDATE_TITLE) \ - $(MAKE) -C $$d libs; \ - done - -# -# Individual modules -# -boehm: -ifdef GC_LEAK_DETECTOR - $(MAKE) -C gc/boehm -endif - -nspr: boehm -ifndef MOZ_NATIVE_NSPR - $(MAKE) -C nsprpub -endif - -ldap: -ifdef MOZ_LDAP_XPCOM - $(MAKE) -C directory/c-sdk -endif - -distclean:: - cat unallmakefiles | $(XARGS) rm -f - rm -f unallmakefiles $(DIST_GARBAGE) - -ifeq ($(OS_ARCH),WINNT) -rebase: -ifdef MOZILLA_OFFICIAL - echo rebasing $(DIST) - /bin/find $(DIST) -name "*.dll" > rebase.lst - rebase -b 60000000 -R . -G rebase.lst - rm rebase.lst -endif - -splitsymbols: -ifdef MOZILLA_OFFICIAL -ifdef MOZ_DEBUG_SYMBOLS - echo finding pdb files - mkdir -p $(DIST)/$(BUILDID) - -cp `/bin/find . -path "./dist" -prune -o -name "*.dll" | sed "s/\.dll$$/\.pdb/" | xargs` $(DIST)/$(BUILDID) - -cp `/bin/find . -path "./dist" -prune -o -name "*.exe" | sed "s/\.exe$$/\.pdb/" | xargs` $(DIST)/$(BUILDID) - -cp `/bin/find . 
-path "./dist" -prune -o -name "*.EXE" | sed "s/\.EXE$$/\.pdb/" | xargs` $(DIST)/$(BUILDID) -endif # MOZ_DEBUG_SYMBOLS -ifdef MOZ_PROFILE - echo splitting symbols out of binaries - /bin/find $(DIST) -name "*.dll" -exec splitsym {} \; - /bin/find $(DIST) -name "*.exe" -exec splitsym {} \; - /bin/find $(DIST) -name "*.EXE" -exec splitsym {} \; - mkdir -p $(DIST)/$(BUILDID) - /bin/find $(DIST) -name "*.dbg" -exec mv {} $(DIST)/$(BUILDID) \; -endif # MOZ_PROFILE -endif # MOZILLA_OFFICIAL - -signnss: -ifdef MOZILLA_OFFICIAL - echo signing NSS libs - cd $(DIST)/bin; ./shlibsign.exe -v -i softokn3.dll - cd $(DIST)/bin; ./shlibsign.exe -v -i freebl3.dll -endif # MOZILLA_OFFICIAL - -BUILDID = $(shell cat $(DEPTH)/config/build_number) -deliver: splitsymbols rebase signnss - -endif # WINNT - diff --git a/tests/examplefiles/flatline_example b/tests/examplefiles/flatline_example deleted file mode 100644 index 5ea73408..00000000 --- a/tests/examplefiles/flatline_example +++ /dev/null @@ -1,186 +0,0 @@ -(field "another field" 2) -(f "000001" -2) - -(missing? "a field" 23) - -(random-value "age") -(weighted-random-value "000001") - -(if (missing? "00000") (random-value "000000") (f "000000")) - -(ensure-value "000000") -(ensure-weighted-value "000000") - -(normalize "000001") -(normalize "length" 8 23) - -(z-score "a numeric field") -(z-score 23) - -(field-prop string "00023" name) -(field-prop numeric "00023" summary missing_count) - -(category-count "species" "Iris-versicolor") -(category-count "species" (f "000004")) -(bin-count "age" (f "bin-selector")) -(bin-center "000003" 3) -(bin-center (field "field-selector") 4) - -(let (v (f "age")) - (cond (< v 2) "baby" - (< v 10) "child" - (< v 20) "teenager" - "adult")) - -(segment-label "000000" "baby" 2 "child" 10 "teenager" 20 "adult") -(segment-label 0 "1st fourth" "2nd fourth" "3rd fourth" "4th fourth") - -(let (max (maximum 0) - min (minimum 0) - step (/ (- max min) 4)) - (segment-label 0 "1st fourth" (+ min step) - "2nd fourth" (+ min step step) - "3rd fourth" (+ min step step step) - "4th fourth")) - -(contains-items? "000000" "blue" "green" "darkblue") - -(<= (percentile "age" 0.5) (f "age") (percentile "age" 0.95)) - -(within-percentiles? "age" 0.5 0.95) - -(percentile-label "000023" "1st" "2nd" "3rd" "4th") - -(cond (within-percentiles? "000023" 0 0.25) "1st" - (within-percentiles? "000023" 0.25 0.5) "2nd" - (within-percentiles? "000023" 0.5 0.75) "3rd" - "4th") - -(str 1 "hello " (field "a")) -(str "value_" (+ 3 4) "/" (name "000001")) - -(length "abc") -(length "") - -(levenshtein (f 0) "a random string") -(if (< (levenshtein (f 0) "bluething") 5) "bluething" (f 0)) - -(occurrences "howdy woman, howdy" "howdy") -(occurrences "howdy woman" "Man" true) -(occurrences "howdy man" "Man" true) -(occurrences "hola, Holas" "hola" true "es") - -(md5 "a text") -(sha1 "a text") -(sha256 "") - -(matches? (field "name") ".*\\sHal\\s.*") -(matches? (field "name") "(?i).*\\shal\\s.*") - -(if (matches? (f "result") (re-quote (f "target"))) "GOOD" "MISS") -(matches? 
(f "name") (str "^" (re-quote (f "salutation")) "\\s *$")) - -(replace "Almost Pig Latin" "\\b(\\w)(\\w+)\\b" "$2$1ay") -(replace-first "swap first two words" "(\\w+)(\\s+)(\\w+)" "$3$2$1") - -(language "this is an English phrase") - -(< (field 0) (field 1)) -(<= (field 0 -1) (field 0) (field 0 1)) -(> (field "date") "07-14-1969") -(>= 23 (f "000004" -2)) - -(= "Dante" (field "Author")) -(= 1300 (field "Year")) -(= (field "Year" -2) (field "Year" -1) (field "Year")) -(!= (field "00033" -1) (field "00033" 1)) - -(and (= 3 (field 1)) (= "meh" (f "a")) (< (f "pregnancies") 5)) -(not true) - -(linear-regression 1 1 2 2 3 3 4 4) -(linear-regression 2.0 3.1 2.3 3.3 24.3 45.2) - -(epoch-fields (f "milliseconds")) -(epoch-year (* 1000 (f "seconds"))) - -(/ (f "a-datetime-string") 1000) -(/ (epoch (f "a-datetime-string")) 1000) - -(epoch-fields (epoch "1969-14-07T06:00:12")) -(epoch-hour (epoch "11~22~30" "hh~mm~ss")) - -(let (x (+ (window "a" -10 10)) - a (/ (* x 3) 4.34) - y (if (< a 10) "Good" "Bad")) - (list x (str (f 10) "-" y) a y)) - -(list (let (z (f 0)) (* 2 (* z z) (log z))) - (let (pi 3.141592653589793 r (f "radius")) (* 4 pi r r))) - -(if (< (field "age") 18) "non-adult" "adult") - -(if (= "oh" (field "000000")) "OH") - -(if (> (field "000001") (mean "000001")) - "above average" - (if (< (field "000001") (mean "000001")) - "below average" - "mediocre")) - -(cond (> (f "000001") (mean "000001")) "above average" - (= (f "000001") (mean "000001")) "below average" - "mediocre") - -(cond (or (= "a" (f 0)) (= "a+" (f 0))) 1 - (or (= "b" (f 0)) (= "b+" (f 0))) 0 - (or (= "c" (f 0)) (= "c+" (f 0))) -1) - -(cond (< (f "age") 2) "baby" - (and (<= 2 (f "age") 10) (= "F" (f "sex"))) "girl" - (and (<= 2 (f "age") 10) (= "M" (f "sex"))) "boy" - (< 10 (f "age") 20) "teenager" - "adult") - -(list (field "age") - (field "weight" -1) - (population "age")) - -(list 1.23 - (if (< (field "age") 10) "child" "adult") - (field 3)) - -(head (cons x lst)) -(tail (cons x lst)) - -(count (list (f 1) (f 2))) -(mode (list a b b c b a c c c)) -(max (list -1 2 -2 0.38)) -(min (list -1.3 2 1)) -(avg (list -1 -2 1 2 0.8 -0.8)) - -(in 3 (1 2 3 2)) -(in "abc" (1 2 3)) -(in (f "size") ("X" "XXL")) - -(< _ 3) -(+ (f "000001" _) 3) -(< -18 _ (f 3)) - -(map (* 2 _) (list (f 0 -1) (f 0) (f 0 1))) - -(all-but "id" "000023") -(fields "000003" 3 "a field" "another" "0002a3b-3") - -(all-with-defaults "species" "Iris-versicolor" - "petal-width" 2.8 - "000002" 0) - -(all-with-numeric-default "median") -(all-with-numeric-default 0) - -(window "000001" -1 2) -(filter (< _ 99.9) (map (+ 32 (* 1.8 _)) (window "Temp" -2 0))) - -(let (now (f "epoch")) - (avg (cond-window "temperature" (< (- (f "epoch") now) 240)))) diff --git a/tests/examplefiles/flipflop.sv b/tests/examplefiles/flipflop.sv deleted file mode 100644 index fe52ed44..00000000 --- a/tests/examplefiles/flipflop.sv +++ /dev/null @@ -1,19 +0,0 @@ -module toplevel(clock,reset); - input clock; - input reset; - - reg flop1; - reg flop2; - - always @ (posedge reset or posedge clock) - if (reset) - begin - flop1 <= 0; - flop2 <= 1; - end - else - begin - flop1 <= flop2; - flop2 <= flop1; - end -endmodule diff --git a/tests/examplefiles/foo.sce b/tests/examplefiles/foo.sce deleted file mode 100644 index 0e5d6afe..00000000 --- a/tests/examplefiles/foo.sce +++ /dev/null @@ -1,6 +0,0 @@ -// Scilab -// -disp(%pi); - -assert_checkequal(2+2,4); - diff --git a/tests/examplefiles/format.ml b/tests/examplefiles/format.ml deleted file mode 100644 index 49b40678..00000000 --- 
a/tests/examplefiles/format.ml +++ /dev/null @@ -1,1213 +0,0 @@ -(***********************************************************************) -(* *) -(* Objective Caml *) -(* *) -(* Pierre Weis, projet Cristal, INRIA Rocquencourt *) -(* *) -(* Copyright 1996 Institut National de Recherche en Informatique et *) -(* en Automatique. All rights reserved. This file is distributed *) -(* under the terms of the GNU Library General Public License, with *) -(* the special exception on linking described in file ../LICENSE. *) -(* *) -(***********************************************************************) - -(* $Id: format.ml,v 1.65 2005/09/26 10:13:08 weis Exp $ *) - -(************************************************************** - - Data structures definitions. - - **************************************************************) - -type size;; - -external size_of_int : int -> size = "%identity";; -external int_of_size : size -> int = "%identity";; - -(* Tokens are one of the following : *) - -type pp_token = -| Pp_text of string (* normal text *) -| Pp_break of int * int (* complete break *) -| Pp_tbreak of int * int (* go to next tabulation *) -| Pp_stab (* set a tabulation *) -| Pp_begin of int * block_type (* beginning of a block *) -| Pp_end (* end of a block *) -| Pp_tbegin of tblock (* beginning of a tabulation block *) -| Pp_tend (* end of a tabulation block *) -| Pp_newline (* to force a newline inside a block *) -| Pp_if_newline (* to do something only if this very - line has been broken *) -| Pp_open_tag of string (* opening a tag name *) -| Pp_close_tag (* closing the most recently opened tag *) - -and tag = string - -and block_type = -| Pp_hbox (* Horizontal block no line breaking *) -| Pp_vbox (* Vertical block each break leads to a new line *) -| Pp_hvbox (* Horizontal-vertical block: same as vbox, except if this block - is small enough to fit on a single line *) -| Pp_hovbox (* Horizontal or Vertical block: breaks lead to new line - only when necessary to print the content of the block *) -| Pp_box (* Horizontal or Indent block: breaks lead to new line - only when necessary to print the content of the block, or - when it leads to a new indentation of the current line *) -| Pp_fits (* Internal usage: when a block fits on a single line *) - -and tblock = Pp_tbox of int list ref (* Tabulation box *) -;; - -(* The Queue: - contains all formatting elements. - elements are tuples (size, token, length), where - size is set when the size of the block is known - len is the declared length of the token. *) -type pp_queue_elem = { - mutable elem_size : size; token : pp_token; length : int -};; - -(* Scan stack: - each element is (left_total, queue element) where left_total - is the value of pp_left_total when the element has been enqueued. *) -type pp_scan_elem = Scan_elem of int * pp_queue_elem;; - -(* Formatting stack: - used to break the lines while printing tokens. - The formatting stack contains the description of - the currently active blocks. *) -type pp_format_elem = Format_elem of block_type * int;; - -(* General purpose queues, used in the formatter. *) -type 'a queue_elem = | Nil | Cons of 'a queue_cell -and 'a queue_cell = {mutable head : 'a; mutable tail : 'a queue_elem};; - -type 'a queue = { - mutable insert : 'a queue_elem; - mutable body : 'a queue_elem -};; - -(* The formatter specific tag handling functions. 
*) -type formatter_tag_functions = { - mark_open_tag : tag -> string; - mark_close_tag : tag -> string; - print_open_tag : tag -> unit; - print_close_tag : tag -> unit; - -};; - -(* A formatter with all its machinery. *) -type formatter = { - mutable pp_scan_stack : pp_scan_elem list; - mutable pp_format_stack : pp_format_elem list; - mutable pp_tbox_stack : tblock list; - mutable pp_tag_stack : tag list; - mutable pp_mark_stack : tag list; - (* Global variables: default initialization is - set_margin 78 - set_min_space_left 0. *) - (* Value of right margin. *) - mutable pp_margin : int; - (* Minimal space left before margin, when opening a block. *) - mutable pp_min_space_left : int; - (* Maximum value of indentation: - no blocks can be opened further. *) - mutable pp_max_indent : int; - (* Space remaining on the current line. *) - mutable pp_space_left : int; - (* Current value of indentation. *) - mutable pp_current_indent : int; - (* True when the line has been broken by the pretty-printer. *) - mutable pp_is_new_line : bool; - (* Total width of tokens already printed. *) - mutable pp_left_total : int; - (* Total width of tokens ever put in queue. *) - mutable pp_right_total : int; - (* Current number of opened blocks. *) - mutable pp_curr_depth : int; - (* Maximum number of blocks which can be simultaneously opened. *) - mutable pp_max_boxes : int; - (* Ellipsis string. *) - mutable pp_ellipsis : string; - (* Output function. *) - mutable pp_output_function : string -> int -> int -> unit; - (* Flushing function. *) - mutable pp_flush_function : unit -> unit; - (* Output of new lines. *) - mutable pp_output_newline : unit -> unit; - (* Output of indentation spaces. *) - mutable pp_output_spaces : int -> unit; - (* Are tags printed ? *) - mutable pp_print_tags : bool; - (* Are tags marked ? *) - mutable pp_mark_tags : bool; - (* Find opening and closing markers of tags. *) - mutable pp_mark_open_tag : tag -> string; - mutable pp_mark_close_tag : tag -> string; - mutable pp_print_open_tag : tag -> unit; - mutable pp_print_close_tag : tag -> unit; - (* The pretty-printer queue. *) - mutable pp_queue : pp_queue_elem queue -};; - -(************************************************************** - - Auxilliaries and basic functions. - - **************************************************************) - - -(* Queues auxilliaries. *) -let make_queue () = {insert = Nil; body = Nil};; - -let clear_queue q = q.insert <- Nil; q.body <- Nil;; - -let add_queue x q = - let c = Cons {head = x; tail = Nil} in - match q with - | {insert = Cons cell} -> q.insert <- c; cell.tail <- c - (* Invariant: when insert is Nil body should be Nil. *) - | _ -> q.insert <- c; q.body <- c;; - -exception Empty_queue;; - -let peek_queue = function - | {body = Cons {head = x}} -> x - | _ -> raise Empty_queue;; - -let take_queue = function - | {body = Cons {head = x; tail = tl}} as q -> - q.body <- tl; - if tl = Nil then q.insert <- Nil; (* Maintain the invariant. *) - x - | _ -> raise Empty_queue;; - -(* Enter a token in the pretty-printer queue. *) -let pp_enqueue state ({length = len} as token) = - state.pp_right_total <- state.pp_right_total + len; - add_queue token state.pp_queue;; - -let pp_clear_queue state = - state.pp_left_total <- 1; state.pp_right_total <- 1; - clear_queue state.pp_queue;; - -(* Pp_infinity: large value for default tokens size. 
- - Pp_infinity is documented as being greater than 1e10; to avoid - confusion about the word ``greater'', we choose pp_infinity greater - than 1e10 + 1; for correct handling of tests in the algorithm, - pp_infinity must be even one more than 1e10 + 1; let's stand on the - safe side by choosing 1.e10+10. - - Pp_infinity could probably be 1073741823 that is 2^30 - 1, that is - the minimal upper bound for integers; now that max_int is defined, - this limit could also be defined as max_int - 1. - - However, before setting pp_infinity to something around max_int, we - must carefully double-check all the integer arithmetic operations - that involve pp_infinity, since any overflow would wreck havoc the - pretty-printing algorithm's invariants. Given that this arithmetic - correctness check is difficult and error prone and given that 1e10 - + 1 is in practice large enough, there is no need to attempt to set - pp_infinity to the theoretically maximum limit. Is it not worth the - burden ! *) - -let pp_infinity = 1000000010;; - -(* Output functions for the formatter. *) -let pp_output_string state s = state.pp_output_function s 0 (String.length s) -and pp_output_newline state = state.pp_output_newline ();; - -let pp_display_blanks state n = state.pp_output_spaces n;; - -(* To format a break, indenting a new line. *) -let break_new_line state offset width = - pp_output_newline state; - state.pp_is_new_line <- true; - let indent = state.pp_margin - width + offset in - (* Don't indent more than pp_max_indent. *) - let real_indent = min state.pp_max_indent indent in - state.pp_current_indent <- real_indent; - state.pp_space_left <- state.pp_margin - state.pp_current_indent; - pp_display_blanks state state.pp_current_indent;; - -(* To force a line break inside a block: no offset is added. *) -let break_line state width = break_new_line state 0 width;; - -(* To format a break that fits on the current line. *) -let break_same_line state width = - state.pp_space_left <- state.pp_space_left - width; - pp_display_blanks state width;; - -(* To indent no more than pp_max_indent, if one tries to open a block - beyond pp_max_indent, then the block is rejected on the left - by simulating a break. *) -let pp_force_break_line state = - match state.pp_format_stack with - | Format_elem (bl_ty, width) :: _ -> - if width > state.pp_space_left then - (match bl_ty with - | Pp_fits -> () | Pp_hbox -> () | _ -> break_line state width) - | _ -> pp_output_newline state;; - -(* To skip a token, if the previous line has been broken. *) -let pp_skip_token state = - (* When calling pp_skip_token the queue cannot be empty. *) - match take_queue state.pp_queue with - {elem_size = size; length = len} -> - state.pp_left_total <- state.pp_left_total - len; - state.pp_space_left <- state.pp_space_left + int_of_size size;; - -(************************************************************** - - The main pretting printing functions. - - **************************************************************) - -(* To format a token. *) -let format_pp_token state size = function - - | Pp_text s -> - state.pp_space_left <- state.pp_space_left - size; - pp_output_string state s; - state.pp_is_new_line <- false - - | Pp_begin (off, ty) -> - let insertion_point = state.pp_margin - state.pp_space_left in - if insertion_point > state.pp_max_indent then - (* can't open a block right there. 
*) - begin pp_force_break_line state end; - let offset = state.pp_space_left - off in - let bl_type = - begin match ty with - | Pp_vbox -> Pp_vbox - | _ -> if size > state.pp_space_left then ty else Pp_fits - end in - state.pp_format_stack <- - Format_elem (bl_type, offset) :: state.pp_format_stack - - | Pp_end -> - begin match state.pp_format_stack with - | x :: (y :: l as ls) -> state.pp_format_stack <- ls - | _ -> () (* No more block to close. *) - end - - | Pp_tbegin (Pp_tbox _ as tbox) -> - state.pp_tbox_stack <- tbox :: state.pp_tbox_stack - - | Pp_tend -> - begin match state.pp_tbox_stack with - | x :: ls -> state.pp_tbox_stack <- ls - | _ -> () (* No more tabulation block to close. *) - end - - | Pp_stab -> - begin match state.pp_tbox_stack with - | Pp_tbox tabs :: _ -> - let rec add_tab n = function - | [] -> [n] - | x :: l as ls -> if n < x then n :: ls else x :: add_tab n l in - tabs := add_tab (state.pp_margin - state.pp_space_left) !tabs - | _ -> () (* No opened tabulation block. *) - end - - | Pp_tbreak (n, off) -> - let insertion_point = state.pp_margin - state.pp_space_left in - begin match state.pp_tbox_stack with - | Pp_tbox tabs :: _ -> - let rec find n = function - | x :: l -> if x >= n then x else find n l - | [] -> raise Not_found in - let tab = - match !tabs with - | x :: l -> - begin try find insertion_point !tabs with Not_found -> x end - | _ -> insertion_point in - let offset = tab - insertion_point in - if offset >= 0 then break_same_line state (offset + n) else - break_new_line state (tab + off) state.pp_margin - | _ -> () (* No opened tabulation block. *) - end - - | Pp_newline -> - begin match state.pp_format_stack with - | Format_elem (_, width) :: _ -> break_line state width - | _ -> pp_output_newline state - end - - | Pp_if_newline -> - if state.pp_current_indent != state.pp_margin - state.pp_space_left - then pp_skip_token state - - | Pp_break (n, off) -> - begin match state.pp_format_stack with - | Format_elem (ty, width) :: _ -> - begin match ty with - | Pp_hovbox -> - if size > state.pp_space_left - then break_new_line state off width - else break_same_line state n - | Pp_box -> - (* Have the line just been broken here ? *) - if state.pp_is_new_line then break_same_line state n else - if size > state.pp_space_left - then break_new_line state off width else - (* break the line here leads to new indentation ? *) - if state.pp_current_indent > state.pp_margin - width + off - then break_new_line state off width - else break_same_line state n - | Pp_hvbox -> break_new_line state off width - | Pp_fits -> break_same_line state n - | Pp_vbox -> break_new_line state off width - | Pp_hbox -> break_same_line state n - end - | _ -> () (* No opened block. *) - end - - | Pp_open_tag tag_name -> - let marker = state.pp_mark_open_tag tag_name in - pp_output_string state marker; - state.pp_mark_stack <- tag_name :: state.pp_mark_stack - - | Pp_close_tag -> - begin match state.pp_mark_stack with - | tag_name :: tags -> - let marker = state.pp_mark_close_tag tag_name in - pp_output_string state marker; - state.pp_mark_stack <- tags - | _ -> () (* No more tag to close. *) - end;; - -(* Print if token size is known or printing is delayed. - Size is known when not negative. - Printing is delayed when the text waiting in the queue requires - more room to format than exists on the current line. 
*) -let rec advance_left state = - try - match peek_queue state.pp_queue with - {elem_size = size; token = tok; length = len} -> - let size = int_of_size size in - if not - (size < 0 && - (state.pp_right_total - state.pp_left_total < state.pp_space_left)) - then begin - ignore(take_queue state.pp_queue); - format_pp_token state (if size < 0 then pp_infinity else size) tok; - state.pp_left_total <- len + state.pp_left_total; - advance_left state - end - with Empty_queue -> ();; - -let enqueue_advance state tok = pp_enqueue state tok; advance_left state;; - -(* To enqueue a string : try to advance. *) -let make_queue_elem size tok len = - {elem_size = size; token = tok; length = len};; - -let enqueue_string_as state size s = - let len = int_of_size size in - enqueue_advance state (make_queue_elem size (Pp_text s) len);; - -let enqueue_string state s = - let len = String.length s in - enqueue_string_as state (size_of_int len) s;; - -(* Routines for scan stack - determine sizes of blocks. *) - -(* The scan_stack is never empty. *) -let scan_stack_bottom = - let q_elem = make_queue_elem (size_of_int (-1)) (Pp_text "") 0 in - [Scan_elem (-1, q_elem)];; - -(* Set size of blocks on scan stack: - if ty = true then size of break is set else size of block is set; - in each case pp_scan_stack is popped. *) -let clear_scan_stack state = state.pp_scan_stack <- scan_stack_bottom;; - -(* Pattern matching on scan stack is exhaustive, - since scan_stack is never empty. - Pattern matching on token in scan stack is also exhaustive, - since scan_push is used on breaks and opening of boxes. *) -let set_size state ty = - match state.pp_scan_stack with - | Scan_elem - (left_tot, - ({elem_size = size; token = tok} as queue_elem)) :: t -> - let size = int_of_size size in - (* test if scan stack contains any data that is not obsolete. *) - if left_tot < state.pp_left_total then clear_scan_stack state else - begin match tok with - | Pp_break (_, _) | Pp_tbreak (_, _) -> - if ty then - begin - queue_elem.elem_size <- size_of_int (state.pp_right_total + size); - state.pp_scan_stack <- t - end - | Pp_begin (_, _) -> - if not ty then - begin - queue_elem.elem_size <- size_of_int (state.pp_right_total + size); - state.pp_scan_stack <- t - end - | _ -> () (* scan_push is only used for breaks and boxes. *) - end - | _ -> () (* scan_stack is never empty. *);; - -(* Push a token on scan stack. If b is true set_size is called. *) -let scan_push state b tok = - pp_enqueue state tok; - if b then set_size state true; - state.pp_scan_stack <- - Scan_elem (state.pp_right_total, tok) :: state.pp_scan_stack;; - -(* To open a new block : - the user may set the depth bound pp_max_boxes - any text nested deeper is printed as the ellipsis string. *) -let pp_open_box_gen state indent br_ty = - state.pp_curr_depth <- state.pp_curr_depth + 1; - if state.pp_curr_depth < state.pp_max_boxes then - let elem = - make_queue_elem - (size_of_int (- state.pp_right_total)) - (Pp_begin (indent, br_ty)) - 0 in - scan_push state false elem else - if state.pp_curr_depth = state.pp_max_boxes - then enqueue_string state state.pp_ellipsis;; - -(* The box which is always opened. *) -let pp_open_sys_box state = pp_open_box_gen state 0 Pp_hovbox;; - -(* Close a block, setting sizes of its subblocks. 
*) -let pp_close_box state () = - if state.pp_curr_depth > 1 then - begin - if state.pp_curr_depth < state.pp_max_boxes then - begin - pp_enqueue state - {elem_size = size_of_int 0; token = Pp_end; length = 0}; - set_size state true; set_size state false - end; - state.pp_curr_depth <- state.pp_curr_depth - 1; - end;; - -(* Open a tag, pushing it on the tag stack. *) -let pp_open_tag state tag_name = - if state.pp_print_tags then begin - state.pp_tag_stack <- tag_name :: state.pp_tag_stack; - state.pp_print_open_tag tag_name end; - if state.pp_mark_tags then - pp_enqueue state - {elem_size = size_of_int 0; token = Pp_open_tag tag_name; length = 0};; - -(* Close a tag, popping it from the tag stack. *) -let pp_close_tag state () = - if state.pp_mark_tags then - pp_enqueue state - {elem_size = size_of_int 0; token = Pp_close_tag; length = 0}; - if state.pp_print_tags then - begin match state.pp_tag_stack with - | tag_name :: tags -> - state.pp_print_close_tag tag_name; - state.pp_tag_stack <- tags - | _ -> () (* No more tag to close. *) - end;; - -let pp_set_print_tags state b = state.pp_print_tags <- b;; -let pp_set_mark_tags state b = state.pp_mark_tags <- b;; -let pp_get_print_tags state () = state.pp_print_tags;; -let pp_get_mark_tags state () = state.pp_mark_tags;; -let pp_set_tags state b = pp_set_print_tags state b; pp_set_mark_tags state b;; - -let pp_get_formatter_tag_functions state () = { - mark_open_tag = state.pp_mark_open_tag; - mark_close_tag = state.pp_mark_close_tag; - print_open_tag = state.pp_print_open_tag; - print_close_tag = state.pp_print_close_tag; -};; - -let pp_set_formatter_tag_functions state { - mark_open_tag = mot; - mark_close_tag = mct; - print_open_tag = pot; - print_close_tag = pct; - } = - state.pp_mark_open_tag <- mot; - state.pp_mark_close_tag <- mct; - state.pp_print_open_tag <- pot; - state.pp_print_close_tag <- pct;; - -(* Initialize pretty-printer. *) -let pp_rinit state = - pp_clear_queue state; - clear_scan_stack state; - state.pp_format_stack <- []; - state.pp_tbox_stack <- []; - state.pp_tag_stack <- []; - state.pp_mark_stack <- []; - state.pp_current_indent <- 0; - state.pp_curr_depth <- 0; - state.pp_space_left <- state.pp_margin; - pp_open_sys_box state;; - -(* Flushing pretty-printer queue. *) -let pp_flush_queue state b = - while state.pp_curr_depth > 1 do - pp_close_box state () - done; - state.pp_right_total <- pp_infinity; - advance_left state; - if b then pp_output_newline state; - pp_rinit state;; - -(************************************************************** - - Procedures to format objects, and use boxes - - **************************************************************) - -(* To format a string. *) -let pp_print_as_size state size s = - if state.pp_curr_depth < state.pp_max_boxes - then enqueue_string_as state size s;; - -let pp_print_as state isize s = - pp_print_as_size state (size_of_int isize) s;; - -let pp_print_string state s = - pp_print_as state (String.length s) s;; - -(* To format an integer. *) -let pp_print_int state i = pp_print_string state (string_of_int i);; - -(* To format a float. *) -let pp_print_float state f = pp_print_string state (string_of_float f);; - -(* To format a boolean. *) -let pp_print_bool state b = pp_print_string state (string_of_bool b);; - -(* To format a char. *) -let pp_print_char state c = - let s = String.create 1 in - s.[0] <- c; - pp_print_as state 1 s;; - -(* Opening boxes. 
*) -let pp_open_hbox state () = pp_open_box_gen state 0 Pp_hbox -and pp_open_vbox state indent = pp_open_box_gen state indent Pp_vbox - -and pp_open_hvbox state indent = pp_open_box_gen state indent Pp_hvbox -and pp_open_hovbox state indent = pp_open_box_gen state indent Pp_hovbox -and pp_open_box state indent = pp_open_box_gen state indent Pp_box;; - -(* Print a new line after printing all queued text - (same for print_flush but without a newline). *) -let pp_print_newline state () = - pp_flush_queue state true; state.pp_flush_function () -and pp_print_flush state () = - pp_flush_queue state false; state.pp_flush_function ();; - -(* To get a newline when one does not want to close the current block. *) -let pp_force_newline state () = - if state.pp_curr_depth < state.pp_max_boxes then - enqueue_advance state (make_queue_elem (size_of_int 0) Pp_newline 0);; - -(* To format something if the line has just been broken. *) -let pp_print_if_newline state () = - if state.pp_curr_depth < state.pp_max_boxes then - enqueue_advance state (make_queue_elem (size_of_int 0) Pp_if_newline 0);; - -(* Breaks: indicate where a block may be broken. - If line is broken then offset is added to the indentation of the current - block else (the value of) width blanks are printed. - To do (?) : add a maximum width and offset value. *) -let pp_print_break state width offset = - if state.pp_curr_depth < state.pp_max_boxes then - let elem = - make_queue_elem - (size_of_int (- state.pp_right_total)) - (Pp_break (width, offset)) - width in - scan_push state true elem;; - -let pp_print_space state () = pp_print_break state 1 0 -and pp_print_cut state () = pp_print_break state 0 0;; - -(* Tabulation boxes. *) -let pp_open_tbox state () = - state.pp_curr_depth <- state.pp_curr_depth + 1; - if state.pp_curr_depth < state.pp_max_boxes then - let elem = - make_queue_elem (size_of_int 0) (Pp_tbegin (Pp_tbox (ref []))) 0 in - enqueue_advance state elem;; - -(* Close a tabulation block. *) -let pp_close_tbox state () = - if state.pp_curr_depth > 1 then begin - if state.pp_curr_depth < state.pp_max_boxes then - let elem = make_queue_elem (size_of_int 0) Pp_tend 0 in - enqueue_advance state elem; - state.pp_curr_depth <- state.pp_curr_depth - 1 end;; - -(* Print a tabulation break. *) -let pp_print_tbreak state width offset = - if state.pp_curr_depth < state.pp_max_boxes then - let elem = - make_queue_elem - (size_of_int (- state.pp_right_total)) - (Pp_tbreak (width, offset)) - width in - scan_push state true elem;; - -let pp_print_tab state () = pp_print_tbreak state 0 0;; - -let pp_set_tab state () = - if state.pp_curr_depth < state.pp_max_boxes then - let elem = - make_queue_elem (size_of_int 0) Pp_stab 0 in - enqueue_advance state elem;; - -(************************************************************** - - Procedures to control the pretty-printers - - **************************************************************) - -(* Fit max_boxes. *) -let pp_set_max_boxes state n = if n > 1 then state.pp_max_boxes <- n;; - -(* To know the current maximum number of boxes allowed. *) -let pp_get_max_boxes state () = state.pp_max_boxes;; - -let pp_over_max_boxes state () = state.pp_curr_depth = state.pp_max_boxes;; - -(* Ellipsis. *) -let pp_set_ellipsis_text state s = state.pp_ellipsis <- s -and pp_get_ellipsis_text state () = state.pp_ellipsis;; - -(* To set the margin of pretty-printer. 
*) -let pp_limit n = - if n < pp_infinity then n else pred pp_infinity;; - -let pp_set_min_space_left state n = - if n >= 1 then - let n = pp_limit n in - state.pp_min_space_left <- n; - state.pp_max_indent <- state.pp_margin - state.pp_min_space_left; - pp_rinit state;; - -(* Initially, we have : - pp_max_indent = pp_margin - pp_min_space_left, and - pp_space_left = pp_margin. *) -let pp_set_max_indent state n = - pp_set_min_space_left state (state.pp_margin - n);; -let pp_get_max_indent state () = state.pp_max_indent;; - -let pp_set_margin state n = - if n >= 1 then - let n = pp_limit n in - state.pp_margin <- n; - let new_max_indent = - (* Try to maintain max_indent to its actual value. *) - if state.pp_max_indent <= state.pp_margin - then state.pp_max_indent else - (* If possible maintain pp_min_space_left to its actual value, - if this leads to a too small max_indent, take half of the - new margin, if it is greater than 1. *) - max (max (state.pp_margin - state.pp_min_space_left) - (state.pp_margin / 2)) 1 in - (* Rebuild invariants. *) - pp_set_max_indent state new_max_indent;; - -let pp_get_margin state () = state.pp_margin;; - -let pp_set_formatter_output_functions state f g = - state.pp_output_function <- f; state.pp_flush_function <- g;; -let pp_get_formatter_output_functions state () = - (state.pp_output_function, state.pp_flush_function);; - -let pp_set_all_formatter_output_functions state - ~out:f ~flush:g ~newline:h ~spaces:i = - pp_set_formatter_output_functions state f g; - state.pp_output_newline <- (function () -> h ()); - state.pp_output_spaces <- (function n -> i n);; -let pp_get_all_formatter_output_functions state () = - (state.pp_output_function, state.pp_flush_function, - state.pp_output_newline, state.pp_output_spaces);; - -let pp_set_formatter_out_channel state os = - state.pp_output_function <- output os; - state.pp_flush_function <- (fun () -> flush os);; - -(************************************************************** - - Creation of specific formatters - - **************************************************************) - -let default_pp_mark_open_tag s = "<" ^ s ^ ">";; -let default_pp_mark_close_tag s = "";; - -let default_pp_print_open_tag s = ();; -let default_pp_print_close_tag = default_pp_print_open_tag;; - -let pp_make_formatter f g h i = - (* The initial state of the formatter contains a dummy box. *) - let pp_q = make_queue () in - let sys_tok = - make_queue_elem (size_of_int (-1)) (Pp_begin (0, Pp_hovbox)) 0 in - add_queue sys_tok pp_q; - let sys_scan_stack = - (Scan_elem (1, sys_tok)) :: scan_stack_bottom in - {pp_scan_stack = sys_scan_stack; - pp_format_stack = []; - pp_tbox_stack = []; - pp_tag_stack = []; - pp_mark_stack = []; - pp_margin = 78; - pp_min_space_left = 10; - pp_max_indent = 78 - 10; - pp_space_left = 78; - pp_current_indent = 0; - pp_is_new_line = true; - pp_left_total = 1; - pp_right_total = 1; - pp_curr_depth = 1; - pp_max_boxes = max_int; - pp_ellipsis = "."; - pp_output_function = f; - pp_flush_function = g; - pp_output_newline = h; - pp_output_spaces = i; - pp_print_tags = false; - pp_mark_tags = false; - pp_mark_open_tag = default_pp_mark_open_tag; - pp_mark_close_tag = default_pp_mark_close_tag; - pp_print_open_tag = default_pp_print_open_tag; - pp_print_close_tag = default_pp_print_close_tag; - pp_queue = pp_q - };; - -(* Default function to output spaces. 
*) -let blank_line = String.make 80 ' ';; -let rec display_blanks state n = - if n > 0 then - if n <= 80 then state.pp_output_function blank_line 0 n else - begin - state.pp_output_function blank_line 0 80; - display_blanks state (n - 80) - end;; - -(* Default function to output new lines. *) -let display_newline state () = state.pp_output_function "\n" 0 1;; - -let make_formatter f g = - let ff = pp_make_formatter f g ignore ignore in - ff.pp_output_newline <- display_newline ff; - ff.pp_output_spaces <- display_blanks ff; - ff;; - -let formatter_of_out_channel oc = - make_formatter (output oc) (fun () -> flush oc);; - -let formatter_of_buffer b = - make_formatter (Buffer.add_substring b) ignore;; - -let stdbuf = Buffer.create 512;; - -let str_formatter = formatter_of_buffer stdbuf;; -let std_formatter = formatter_of_out_channel stdout;; -let err_formatter = formatter_of_out_channel stderr;; - -let flush_str_formatter () = - pp_flush_queue str_formatter false; - let s = Buffer.contents stdbuf in - Buffer.reset stdbuf; - s;; - -(************************************************************** - - Basic functions on the standard formatter - - **************************************************************) - -let open_hbox = pp_open_hbox std_formatter -and open_vbox = pp_open_vbox std_formatter -and open_hvbox = pp_open_hvbox std_formatter -and open_hovbox = pp_open_hovbox std_formatter -and open_box = pp_open_box std_formatter -and close_box = pp_close_box std_formatter -and open_tag = pp_open_tag std_formatter -and close_tag = pp_close_tag std_formatter -and print_as = pp_print_as std_formatter -and print_string = pp_print_string std_formatter -and print_int = pp_print_int std_formatter -and print_float = pp_print_float std_formatter -and print_char = pp_print_char std_formatter -and print_bool = pp_print_bool std_formatter -and print_break = pp_print_break std_formatter -and print_cut = pp_print_cut std_formatter -and print_space = pp_print_space std_formatter -and force_newline = pp_force_newline std_formatter -and print_flush = pp_print_flush std_formatter -and print_newline = pp_print_newline std_formatter -and print_if_newline = pp_print_if_newline std_formatter - -and open_tbox = pp_open_tbox std_formatter -and close_tbox = pp_close_tbox std_formatter -and print_tbreak = pp_print_tbreak std_formatter - -and set_tab = pp_set_tab std_formatter -and print_tab = pp_print_tab std_formatter - -and set_margin = pp_set_margin std_formatter -and get_margin = pp_get_margin std_formatter - -and set_max_indent = pp_set_max_indent std_formatter -and get_max_indent = pp_get_max_indent std_formatter - -and set_max_boxes = pp_set_max_boxes std_formatter -and get_max_boxes = pp_get_max_boxes std_formatter -and over_max_boxes = pp_over_max_boxes std_formatter - -and set_ellipsis_text = pp_set_ellipsis_text std_formatter -and get_ellipsis_text = pp_get_ellipsis_text std_formatter - -and set_formatter_out_channel = - pp_set_formatter_out_channel std_formatter - -and set_formatter_output_functions = - pp_set_formatter_output_functions std_formatter -and get_formatter_output_functions = - pp_get_formatter_output_functions std_formatter - -and set_all_formatter_output_functions = - pp_set_all_formatter_output_functions std_formatter -and get_all_formatter_output_functions = - pp_get_all_formatter_output_functions std_formatter - -and set_formatter_tag_functions = - pp_set_formatter_tag_functions std_formatter -and get_formatter_tag_functions = - pp_get_formatter_tag_functions std_formatter -and 
set_print_tags = - pp_set_print_tags std_formatter -and get_print_tags = - pp_get_print_tags std_formatter -and set_mark_tags = - pp_set_mark_tags std_formatter -and get_mark_tags = - pp_get_mark_tags std_formatter -and set_tags = - pp_set_tags std_formatter -;; - - -(************************************************************** - - Printf implementation. - - **************************************************************) - -(* Error messages when processing formats. *) - -(* Trailer: giving up at character number ... *) -let giving_up mess fmt i = - "fprintf: " ^ mess ^ " ``" ^ fmt ^ "'', \ - giving up at character number " ^ string_of_int i ^ - (if i < String.length fmt - then " (" ^ String.make 1 fmt.[i] ^ ")." - else String.make 1 '.');; - -(* When an invalid format deserves a special error explanation. *) -let format_invalid_arg mess fmt i = invalid_arg (giving_up mess fmt i);; - -(* Standard invalid format. *) -let invalid_format fmt i = format_invalid_arg "bad format" fmt i;; - -(* Cannot find a valid integer into that format. *) -let invalid_integer fmt i = - invalid_arg (giving_up "bad integer specification" fmt i);; - -(* Finding an integer out of a sub-string of the format. *) -let format_int_of_string fmt i s = - let sz = - try int_of_string s with - | Failure s -> invalid_integer fmt i in - size_of_int sz;; - -(* Getting strings out of buffers. *) -let get_buffer_out b = - let s = Buffer.contents b in - Buffer.reset b; - s;; - -(* [ppf] is supposed to be a pretty-printer that outputs in buffer [b]: - to extract contents of [ppf] as a string we flush [ppf] and get the string - out of [b]. *) -let string_out b ppf = - pp_flush_queue ppf false; - get_buffer_out b;; - -(* Applies [printer] to a formatter that outputs on a fresh buffer, - then returns the resulting material. *) -let exstring printer arg = - let b = Buffer.create 512 in - let ppf = formatter_of_buffer b in - printer ppf arg; - string_out b ppf;; - -(* To turn out a character accumulator into the proper string result. *) -let implode_rev s0 = function - | [] -> s0 - | l -> String.concat "" (List.rev (s0 :: l));; - -external format_to_string : ('a, 'b, 'c, 'd) format4 -> string = "%identity";; - -(* [fprintf_out] is the printf-like function generator: given the - - [str] flag that tells if we are printing into a string, - - the [out] function that has to be called at the end of formatting, - it generates a [fprintf] function that takes as arguments a [ppf] - formatter and a printing format to print the rest of arguments - according to the format. - Regular [fprintf]-like functions of this module are obtained via partial - applications of [fprintf_out]. 
*) -let mkprintf str get_out = - let rec kprintf k fmt = - let fmt = format_to_string fmt in - let len = String.length fmt in - - let kpr fmt v = - let ppf = get_out fmt in - let print_as = ref None in - let pp_print_as_char c = - match !print_as with - | None -> pp_print_char ppf c - | Some size -> - pp_print_as_size ppf size (String.make 1 c); - print_as := None - and pp_print_as_string s = - match !print_as with - | None -> pp_print_string ppf s - | Some size -> - pp_print_as_size ppf size s; - print_as := None in - - let rec doprn n i = - if i >= len then Obj.magic (k ppf) else - match fmt.[i] with - | '%' -> - Printf.scan_format fmt v n i cont_s cont_a cont_t cont_f cont_m - | '@' -> - let i = succ i in - if i >= len then invalid_format fmt i else - begin match fmt.[i] with - | '[' -> - do_pp_open_box ppf n (succ i) - | ']' -> - pp_close_box ppf (); - doprn n (succ i) - | '{' -> - do_pp_open_tag ppf n (succ i) - | '}' -> - pp_close_tag ppf (); - doprn n (succ i) - | ' ' -> - pp_print_space ppf (); - doprn n (succ i) - | ',' -> - pp_print_cut ppf (); - doprn n (succ i) - | '?' -> - pp_print_flush ppf (); - doprn n (succ i) - | '.' -> - pp_print_newline ppf (); - doprn n (succ i) - | '\n' -> - pp_force_newline ppf (); - doprn n (succ i) - | ';' -> - do_pp_break ppf n (succ i) - | '<' -> - let got_size size n i = - print_as := Some size; - doprn n (skip_gt i) in - get_int n (succ i) got_size - | '@' as c -> - pp_print_as_char c; - doprn n (succ i) - | c -> invalid_format fmt i - end - | c -> - pp_print_as_char c; - doprn n (succ i) - - and cont_s n s i = - pp_print_as_string s; doprn n i - and cont_a n printer arg i = - if str then - pp_print_as_string ((Obj.magic printer : unit -> _ -> string) () arg) - else - printer ppf arg; - doprn n i - and cont_t n printer i = - if str then - pp_print_as_string ((Obj.magic printer : unit -> string) ()) - else - printer ppf; - doprn n i - and cont_f n i = - pp_print_flush ppf (); doprn n i - - and cont_m n sfmt i = - kprintf (Obj.magic (fun _ -> doprn n i)) sfmt - - and get_int n i c = - if i >= len then invalid_integer fmt i else - match fmt.[i] with - | ' ' -> get_int n (succ i) c - | '%' -> - let cont_s n s i = c (format_int_of_string fmt i s) n i - and cont_a n printer arg i = invalid_integer fmt i - and cont_t n printer i = invalid_integer fmt i - and cont_f n i = invalid_integer fmt i - and cont_m n sfmt i = invalid_integer fmt i in - Printf.scan_format fmt v n i cont_s cont_a cont_t cont_f cont_m - | _ -> - let rec get j = - if j >= len then invalid_integer fmt j else - match fmt.[j] with - | '0' .. 
'9' | '-' -> get (succ j) - | _ -> - let size = - if j = i then size_of_int 0 else - format_int_of_string fmt j (String.sub fmt i (j - i)) in - c size n j in - get i - - and skip_gt i = - if i >= len then invalid_format fmt i else - match fmt.[i] with - | ' ' -> skip_gt (succ i) - | '>' -> succ i - | _ -> invalid_format fmt i - - and get_box_kind i = - if i >= len then Pp_box, i else - match fmt.[i] with - | 'h' -> - let i = succ i in - if i >= len then Pp_hbox, i else - begin match fmt.[i] with - | 'o' -> - let i = succ i in - if i >= len then format_invalid_arg "bad box format" fmt i else - begin match fmt.[i] with - | 'v' -> Pp_hovbox, succ i - | c -> - format_invalid_arg - ("bad box name ho" ^ String.make 1 c) fmt i end - | 'v' -> Pp_hvbox, succ i - | c -> Pp_hbox, i - end - | 'b' -> Pp_box, succ i - | 'v' -> Pp_vbox, succ i - | _ -> Pp_box, i - - and get_tag_name n i c = - let rec get accu n i j = - if j >= len - then c (implode_rev (String.sub fmt i (j - i)) accu) n j else - match fmt.[j] with - | '>' -> c (implode_rev (String.sub fmt i (j - i)) accu) n j - | '%' -> - let s0 = String.sub fmt i (j - i) in - let cont_s n s i = get (s :: s0 :: accu) n i i - and cont_a n printer arg i = - let s = - if str - then (Obj.magic printer : unit -> _ -> string) () arg - else exstring printer arg in - get (s :: s0 :: accu) n i i - and cont_t n printer i = - let s = - if str - then (Obj.magic printer : unit -> string) () - else exstring (fun ppf () -> printer ppf) () in - get (s :: s0 :: accu) n i i - and cont_f n i = - format_invalid_arg "bad tag name specification" fmt i - and cont_m n sfmt i = - format_invalid_arg "bad tag name specification" fmt i in - Printf.scan_format fmt v n j cont_s cont_a cont_t cont_f cont_m - | c -> get accu n i (succ j) in - get [] n i i - - and do_pp_break ppf n i = - if i >= len then begin pp_print_space ppf (); doprn n i end else - match fmt.[i] with - | '<' -> - let rec got_nspaces nspaces n i = - get_int n i (got_offset nspaces) - and got_offset nspaces offset n i = - pp_print_break ppf (int_of_size nspaces) (int_of_size offset); - doprn n (skip_gt i) in - get_int n (succ i) got_nspaces - | c -> pp_print_space ppf (); doprn n i - - and do_pp_open_box ppf n i = - if i >= len then begin pp_open_box_gen ppf 0 Pp_box; doprn n i end else - match fmt.[i] with - | '<' -> - let kind, i = get_box_kind (succ i) in - let got_size size n i = - pp_open_box_gen ppf (int_of_size size) kind; - doprn n (skip_gt i) in - get_int n i got_size - | c -> pp_open_box_gen ppf 0 Pp_box; doprn n i - - and do_pp_open_tag ppf n i = - if i >= len then begin pp_open_tag ppf ""; doprn n i end else - match fmt.[i] with - | '<' -> - let got_name tag_name n i = - pp_open_tag ppf tag_name; - doprn n (skip_gt i) in - get_tag_name n (succ i) got_name - | c -> pp_open_tag ppf ""; doprn n i in - - doprn (Printf.index_of_int 0) 0 in - - Printf.kapr kpr fmt in - - kprintf;; - -(************************************************************** - - Defining [fprintf] and various flavors of [fprintf]. 
- - **************************************************************) - -let kfprintf k ppf = mkprintf false (fun _ -> ppf) k;; - -let fprintf ppf = kfprintf ignore ppf;; -let printf fmt = fprintf std_formatter fmt;; -let eprintf fmt = fprintf err_formatter fmt;; - -let kbprintf k b = - mkprintf false (fun _ -> formatter_of_buffer b) k;; - -let bprintf b = kbprintf ignore b;; - -let ksprintf k = - let b = Buffer.create 512 in - let k ppf = k (string_out b ppf) in - mkprintf true (fun _ -> formatter_of_buffer b) k;; - -let kprintf = ksprintf;; - -let sprintf fmt = ksprintf (fun s -> s) fmt;; - -at_exit print_flush;; diff --git a/tests/examplefiles/freefem.edp b/tests/examplefiles/freefem.edp deleted file mode 100644 index d4313338..00000000 --- a/tests/examplefiles/freefem.edp +++ /dev/null @@ -1,94 +0,0 @@ -// Example of problem solving in parallel - -// Usage: -// ff-mpirun -np 12 LaplacianParallel.edp (here 12 is the number of threads (command nproc to know that) -// Need FreeFem++ with PETSc - -// Parallel stuff -load "PETSc" -macro partitioner()metis// -macro dimension()2// -include "./macro_ddm.idp" - -macro def(i)[i]// -macro init(i)[i]// -//macro meshN()mesh// //these macro are defined in macro_ddm.idp -//macro intN()int2d// - -// Parameters -int nn = 500; -real L = 1.; -real H = 1.; - -func f = 1.; - -func Pk = P1; - -// Mesh -border b1(t=0, L){x=t; y=0; label=1;} -border b2(t=0, H){x=L; y=t; label=2;} -border b3(t=L, 0){x=t; y=H; label=3;} -border b4(t=H, 0){x=0; y=t; label=4;} - -meshN Th = buildmesh(b1(1) + b2(1) + b3(1) + b4(1)); //build a really coarse mesh (just to build the fespace later) -//meshN Th = square(1, 1, [L*x, H*y]); - -int[int] Wall = [1, 2, 3, 4]; - -// Fespace -fespace Uh(Th, Pk); - -// Mesh partition -int[int] ArrayIntersection; -int[int][int] RestrictionIntersection(0); -real[int] D; - -meshN ThBorder; -meshN ThGlobal = buildmesh(b1(nn*L) + b2(nn*H) + b3(nn*L) + b4(nn*H)); //build the mesh to partition -//meshN ThGlobal = square(nn*L, nn*H, [L*x, H*y]); -int InterfaceLabel = 10; -int Split = 1; -int Overlap = 1; -build(Th, ThBorder, ThGlobal, InterfaceLabel, Split, Overlap, D, ArrayIntersection, RestrictionIntersection, Uh, Pk, mpiCommWorld, false); //see macro_ddm.idp for detailed parameters - -// Macro -macro grad(u) [dx(u), dy(u)] // - -// Problem -varf vLaplacian (u, uh) //Problem in varf formulation mandatory - = intN(Th)( - grad(u)' * grad(uh) - ) - - intN(Th)( - f * uh - ) - + on(Wall, u=0) - ; - -matrix Laplacian = vLaplacian(Uh, Uh); //build the sequential matrix -real[int] LaplacianBoundary = vLaplacian(0, Uh);// and right hand side - -//// In sequential, you normally do that: -//// Solve -//Uh def(u)=init(0); -//u[] = Laplacian^-1 * LaplacianBoundary; - -//// Plot -//plot(u); - -// In parallel: -// Matrix construction -dmatrix PLaplacian(Laplacian, ArrayIntersection, RestrictionIntersection, D, bs=1); //build the parallel matrix -set(PLaplacian, sparams="-pc_type lu -pc_factor_mat_solver_package mumps"); //preconditioner LU and MUMPS solver (see PETSc doc for detailed parameters) - -// Solve -Uh def(u)=init(0); //define the unknown (must be defined after mesh partitioning) -u[] = PLaplacian^-1 * LaplacianBoundary; - -// Export results to vtk (there is not plot in parallel) -{ - fespace PV(Th, P1); - PV uu=u; - int[int] Order = [1]; - export("Result", Th, uu, Order, mpiCommWorld); -} diff --git a/tests/examplefiles/fucked_up.rb b/tests/examplefiles/fucked_up.rb deleted file mode 100644 index b1d0ee3b..00000000 --- a/tests/examplefiles/fucked_up.rb +++ 
/dev/null @@ -1,77 +0,0 @@ -# vim:ft=ruby - -events = Hash.new { |h, k| h[k] = [] } -DATA.read.split(/\n\n\n\s*/).each do |event| - name = event[/^.*/].sub(/http:.*/, '') - event[/\n.*/m].scan(/^([A-Z]{2}\S*)\s*(\S*)\s*(\S*)(\s*\S*)/) do |kind, day, daytime, comment| - events[ [day, daytime] ] << [kind, name + comment] - end -end - -conflicts = 0 -events.to_a.sort_by do |(day, daytime),| - [%w(Mo Di Mi Do Fr).index(day) || 0, daytime] -end.each do |(day, daytime), names| - if names.size > 1 - conflicts += 1 - print '!!! ' - end - print "#{day} #{daytime}: " - names.each { |kind, name| puts " #{kind} #{name}" } - puts -end - -puts '%d conflicts' % conflicts -puts '%d SWS' % (events.inject(0) { |sum, ((day, daytime),)| sum + (daytime[/\d+$/].to_i - daytime[/^\d+/].to_i) }) - -string = % foo # strange. huh? -print "Escape here: \n" -print 'Dont escape here: \n' - -__END__ -Informatik und Informationsgesellschaft I: Digitale Medien (32 214) -Computer lassen ihre eigentliche Bestimmung durch Multimedia und Vernetzung erkennen: Es sind digitale Medien, die alle bisherigen Massen- und Kommunikationsmedien simulieren, kopieren oder ersetzen können. Die kurze Geschichte elektronischer Medien vom Telegramm bis zum Fernsehen wird so zur Vorgeschichte des Computers als Medium. Der Prozess der Mediatisierung der Rechnernetze soll in Technik, Theorie und Praxis untersucht werden. Das PR soll die Techniken der ortsverteilten und zeitversetzten Lehre an Hand praktischer Übungen vorführen und untersuchen. -VL Di 15-17 wöch. RUD 25, 3.101 J. Koubek -VL Do 15-17 wöch. RUD 25, 3.101 -UE/PR Do 17-19 wöch. RUD 25, 3.101 J.-M. Loebel - - -Methoden und Modelle des Systementwurfs (32 223) -Gute Methoden zum Entwurf und zur Verifikation von Systemen sind ein Schlüssel für gute Software. Dieses Seminar betrachtet moderne Entwurfsmethoden. - VL Di 09-11 wöch. RUD 26, 0’313 W. Reisig - VL Do 09-11 wöch. RUD 26, 0’313 - UE Di 11-13 wöch. RUD 26, 0’313 - PR Di 13-15 wöch. RUD 26, 0’313 D. Weinberg - - -Komplexitätstheorie (32 229) -In dieser Vorlesung untersuchen wir eine Reihe von wichtigen algorithmischen Problemstellungen aus verschiedenen Bereichen der Informatik. Unser besonderes Interesse gilt dabei der Abschätzung der Rechenressourcen, die zu ihrer Lösung aufzubringen sind. Die Vorlesung bildet eine wichtige Grundlage für weiterführende Veranstaltungen in den Bereichen Algorithmen, Kryptologie, Algorithmisches Lernen und Algorithmisches Beweisen. - VL Di 09-11 wöch. RUD 26, 1’303 J. Köbler - VL Do 09-11 wöch. RUD 26, 1’305 - UE Do 11-13 wöch. RUD 26, 1’305 - - -Zuverlässige Systeme (32 234) -Mit zunehmender Verbreitung der Computertechnologie in immer mehr Bereichen des menschlichen Lebens wird die Zuverlässigkeit solcher Systeme zu einer immer zentraleren Frage. -Der Halbkurs "Zuverlässige Systeme" konzentriert sich auf folgende Schwerpunkte: Zuverlässigkeit, Fehlertoleranz, Responsivität, Messungen, Anwendungen, Systemmodelle und Techniken, Ausfallverhalten, Fehlermodelle, Schedulingtechniken, Software/Hardware - responsives Systemdesign, Analyse und Synthese, Bewertung, Fallstudien in Forschung und Industrie. -Der Halbkurs kann mit dem Halbkurs "Eigenschaften mobiler und eingebetteter Systeme" zu einem Projektkurs kombiniert werden. Ein gemeinsames Projekt begleitet beide Halbkurse. -VL Di 09-11 wöch. RUD 26, 1’308 M. Malek -VL Do 09-11 wöch. RUD 26, 1’308 -PR n.V. 
- - -Stochastik für InformatikerInnen (32 239) -Grundlagen der Wahrscheinlichkeitsrechnung, Diskrete und stetige Wahrscheinlichkeitsmodelle in der Informatik, Grenzwertsätze, Simulationsverfahren, Zufallszahlen, Statistische Schätz- und Testverfahren, Markoffsche Ketten, Simulated Annealing, Probabilistische Analyse von Algorithmen. -VL Mo 09-11 wöch. RUD 25, 3.101 W. Kössler -VL Mi 09-11 wöch. RUD 25, 3.101 -UE Mo 11-13 wöch. RUD 25, 3.101 - UE Mi 11-13 wöch. RUD 25. 3.101 - - -Geschichte der Informatik – Ausgewählte Kapitel (32 243) -VL Mi 13-15 wöch. RUD 25, 3.113 W. Coy - - -Aktuelle Themen der Theoretischen Informatik (32 260) -In diesem Seminar sollen wichtige aktuelle Veröffentlichungen aus der theoretischen Informatik gemeinsam erarbeitet werden. Genaueres wird erst kurz vor dem Seminar entschieden. Bei Interesse wenden Sie sich bitte möglichst frühzeitig an den Veranstalter. - SE Fr 09-11 wöch. RUD 26, 1’307 M. Grohe  diff --git a/tests/examplefiles/function.mu b/tests/examplefiles/function.mu deleted file mode 100644 index 46bb259d..00000000 --- a/tests/examplefiles/function.mu +++ /dev/null @@ -1 +0,0 @@ -a::b () diff --git a/tests/examplefiles/functional.rst b/tests/examplefiles/functional.rst deleted file mode 100644 index bfe67d10..00000000 --- a/tests/examplefiles/functional.rst +++ /dev/null @@ -1,1472 +0,0 @@ -Functional Programming HOWTO -================================ - -**Version 0.30** - -(This is a first draft. Please send comments/error -reports/suggestions to amk@amk.ca. This URL is probably not going to -be the final location of the document, so be careful about linking to -it -- you may want to add a disclaimer.) - -In this document, we'll take a tour of Python's features suitable for -implementing programs in a functional style. After an introduction to -the concepts of functional programming, we'll look at language -features such as iterators and generators and relevant library modules -such as ``itertools`` and ``functools``. - - -.. contents:: - -Introduction ----------------------- - -This section explains the basic concept of functional programming; if -you're just interested in learning about Python language features, -skip to the next section. - -Programming languages support decomposing problems in several different -ways: - -* Most programming languages are **procedural**: - programs are lists of instructions that tell the computer what to - do with the program's input. - C, Pascal, and even Unix shells are procedural languages. - -* In **declarative** languages, you write a specification that describes - the problem to be solved, and the language implementation figures out - how to perform the computation efficiently. SQL is the declarative - language you're most likely to be familiar with; a SQL query describes - the data set you want to retrieve, and the SQL engine decides whether to - scan tables or use indexes, which subclauses should be performed first, - etc. - -* **Object-oriented** programs manipulate collections of objects. - Objects have internal state and support methods that query or modify - this internal state in some way. Smalltalk and Java are - object-oriented languages. C++ and Python are languages that - support object-oriented programming, but don't force the use - of object-oriented features. - -* **Functional** programming decomposes a problem into a set of functions. - Ideally, functions only take inputs and produce outputs, and don't have any - internal state that affects the output produced for a given input. 
- Well-known functional languages include the ML family (Standard ML, - OCaml, and other variants) and Haskell. - -The designers of some computer languages have chosen one approach to -programming that's emphasized. This often makes it difficult to -write programs that use a different approach. Other languages are -multi-paradigm languages that support several different approaches. Lisp, -C++, and Python are multi-paradigm; you can write programs or -libraries that are largely procedural, object-oriented, or functional -in all of these languages. In a large program, different sections -might be written using different approaches; the GUI might be object-oriented -while the processing logic is procedural or functional, for example. - -In a functional program, input flows through a set of functions. Each -function operates on its input and produces some output. Functional -style frowns upon functions with side effects that modify internal -state or make other changes that aren't visible in the function's -return value. Functions that have no side effects at all are -called **purely functional**. -Avoiding side effects means not using data structures -that get updated as a program runs; every function's output -must only depend on its input. - -Some languages are very strict about purity and don't even have -assignment statements such as ``a=3`` or ``c = a + b``, but it's -difficult to avoid all side effects. Printing to the screen or -writing to a disk file are side effects, for example. For example, in -Python a ``print`` statement or a ``time.sleep(1)`` both return no -useful value; they're only called for their side effects of sending -some text to the screen or pausing execution for a second. - -Python programs written in functional style usually won't go to the -extreme of avoiding all I/O or all assignments; instead, they'll -provide a functional-appearing interface but will use non-functional -features internally. For example, the implementation of a function -will still use assignments to local variables, but won't modify global -variables or have other side effects. - -Functional programming can be considered the opposite of -object-oriented programming. Objects are little capsules containing -some internal state along with a collection of method calls that let -you modify this state, and programs consist of making the right set of -state changes. Functional programming wants to avoid state changes as -much as possible and works with data flowing between functions. In -Python you might combine the two approaches by writing functions that -take and return instances representing objects in your application -(e-mail messages, transactions, etc.). - -Functional design may seem like an odd constraint to work under. Why -should you avoid objects and side effects? There are theoretical and -practical advantages to the functional style: - -* Formal provability. -* Modularity. -* Composability. -* Ease of debugging and testing. - -Formal provability -'''''''''''''''''''''' - -A theoretical benefit is that it's easier to construct a mathematical proof -that a functional program is correct. - -For a long time researchers have been interested in finding ways to -mathematically prove programs correct. This is different from testing -a program on numerous inputs and concluding that its output is usually -correct, or reading a program's source code and concluding that the -code looks right; the goal is instead a rigorous proof that a program -produces the right result for all possible inputs. 
- -The technique used to prove programs correct is to write down -**invariants**, properties of the input data and of the program's -variables that are always true. For each line of code, you then show -that if invariants X and Y are true **before** the line is executed, -the slightly different invariants X' and Y' are true **after** -the line is executed. This continues until you reach the end of the -program, at which point the invariants should match the desired -conditions on the program's output. - -Functional programming's avoidance of assignments arose because -assignments are difficult to handle with this technique; -assignments can break invariants that were true before the assignment -without producing any new invariants that can be propagated onward. - -Unfortunately, proving programs correct is largely impractical and not -relevant to Python software. Even trivial programs require proofs that -are several pages long; the proof of correctness for a moderately -complicated program would be enormous, and few or none of the programs -you use daily (the Python interpreter, your XML parser, your web -browser) could be proven correct. Even if you wrote down or generated -a proof, there would then be the question of verifying the proof; -maybe there's an error in it, and you wrongly believe you've proved -the program correct. - -Modularity -'''''''''''''''''''''' - -A more practical benefit of functional programming is that it forces -you to break apart your problem into small pieces. Programs are more -modular as a result. It's easier to specify and write a small -function that does one thing than a large function that performs a -complicated transformation. Small functions are also easier to read -and to check for errors. - - -Ease of debugging and testing -'''''''''''''''''''''''''''''''''' - -Testing and debugging a functional-style program is easier. - -Debugging is simplified because functions are generally small and -clearly specified. When a program doesn't work, each function is an -interface point where you can check that the data are correct. You -can look at the intermediate inputs and outputs to quickly isolate the -function that's responsible for a bug. - -Testing is easier because each function is a potential subject for a -unit test. Functions don't depend on system state that needs to be -replicated before running a test; instead you only have to synthesize -the right input and then check that the output matches expectations. - - - -Composability -'''''''''''''''''''''' - -As you work on a functional-style program, you'll write a number of -functions with varying inputs and outputs. Some of these functions -will be unavoidably specialized to a particular application, but -others will be useful in a wide variety of programs. For example, a -function that takes a directory path and returns all the XML files in -the directory, or a function that takes a filename and returns its -contents, can be applied to many different situations. - -Over time you'll form a personal library of utilities. Often you'll -assemble new programs by arranging existing functions in a new -configuration and writing a few functions specialized for the current -task. - - - -Iterators ------------------------ - -I'll start by looking at a Python language feature that's an important -foundation for writing functional-style programs: iterators. - -An iterator is an object representing a stream of data; this object -returns the data one element at a time. 
A Python iterator must -support a method called ``next()`` that takes no arguments and always -returns the next element of the stream. If there are no more elements -in the stream, ``next()`` must raise the ``StopIteration`` exception. -Iterators don't have to be finite, though; it's perfectly reasonable -to write an iterator that produces an infinite stream of data. - -The built-in ``iter()`` function takes an arbitrary object and tries -to return an iterator that will return the object's contents or -elements, raising ``TypeError`` if the object doesn't support -iteration. Several of Python's built-in data types support iteration, -the most common being lists and dictionaries. An object is called -an **iterable** object if you can get an iterator for it. - -You can experiment with the iteration interface manually:: - - >>> L = [1,2,3] - >>> it = iter(L) - >>> print it - - >>> it.next() - 1 - >>> it.next() - 2 - >>> it.next() - 3 - >>> it.next() - Traceback (most recent call last): - File "", line 1, in ? - StopIteration - >>> - -Python expects iterable objects in several different contexts, the -most important being the ``for`` statement. In the statement ``for X in Y``, -Y must be an iterator or some object for which ``iter()`` can create -an iterator. These two statements are equivalent:: - - for i in iter(obj): - print i - - for i in obj: - print i - -Iterators can be materialized as lists or tuples by using the -``list()`` or ``tuple()`` constructor functions:: - - >>> L = [1,2,3] - >>> iterator = iter(L) - >>> t = tuple(iterator) - >>> t - (1, 2, 3) - -Sequence unpacking also supports iterators: if you know an iterator -will return N elements, you can unpack them into an N-tuple:: - - >>> L = [1,2,3] - >>> iterator = iter(L) - >>> a,b,c = iterator - >>> a,b,c - (1, 2, 3) - -Built-in functions such as ``max()`` and ``min()`` can take a single -iterator argument and will return the largest or smallest element. -The ``"in"`` and ``"not in"`` operators also support iterators: ``X in -iterator`` is true if X is found in the stream returned by the -iterator. You'll run into obvious problems if the iterator is -infinite; ``max()``, ``min()``, and ``"not in"`` will never return, and -if the element X never appears in the stream, the ``"in"`` operator -won't return either. - -Note that you can only go forward in an iterator; there's no way to -get the previous element, reset the iterator, or make a copy of it. -Iterator objects can optionally provide these additional capabilities, -but the iterator protocol only specifies the ``next()`` method. -Functions may therefore consume all of the iterator's output, and if -you need to do something different with the same stream, you'll have -to create a new iterator. - - - -Data Types That Support Iterators -''''''''''''''''''''''''''''''''''' - -We've already seen how lists and tuples support iterators. In fact, -any Python sequence type, such as strings, will automatically support -creation of an iterator. - -Calling ``iter()`` on a dictionary returns an iterator that will loop -over the dictionary's keys:: - - >>> m = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6, - ... 'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12} - >>> for key in m: - ... print key, m[key] - Mar 3 - Feb 2 - Aug 8 - Sep 9 - May 5 - Jun 6 - Jul 7 - Jan 1 - Apr 4 - Nov 11 - Dec 12 - Oct 10 - -Note that the order is essentially random, because it's based on the -hash ordering of the objects in the dictionary. 
- -Applying ``iter()`` to a dictionary always loops over the keys, but -dictionaries have methods that return other iterators. If you want to -iterate over keys, values, or key/value pairs, you can explicitly call -the ``iterkeys()``, ``itervalues()``, or ``iteritems()`` methods to -get an appropriate iterator. - -The ``dict()`` constructor can accept an iterator that returns a -finite stream of ``(key, value)`` tuples:: - - >>> L = [('Italy', 'Rome'), ('France', 'Paris'), ('US', 'Washington DC')] - >>> dict(iter(L)) - {'Italy': 'Rome', 'US': 'Washington DC', 'France': 'Paris'} - -Files also support iteration by calling the ``readline()`` -method until there are no more lines in the file. This means you can -read each line of a file like this:: - - for line in file: - # do something for each line - ... - -Sets can take their contents from an iterable and let you iterate over -the set's elements:: - - S = set((2, 3, 5, 7, 11, 13)) - for i in S: - print i - - - -Generator expressions and list comprehensions ----------------------------------------------------- - -Two common operations on an iterator's output are 1) performing some -operation for every element, 2) selecting a subset of elements that -meet some condition. For example, given a list of strings, you might -want to strip off trailing whitespace from each line or extract all -the strings containing a given substring. - -List comprehensions and generator expressions (short form: "listcomps" -and "genexps") are a concise notation for such operations, borrowed -from the functional programming language Haskell -(http://www.haskell.org). You can strip all the whitespace from a -stream of strings with the following code:: - - line_list = [' line 1\n', 'line 2 \n', ...] - - # Generator expression -- returns iterator - stripped_iter = (line.strip() for line in line_list) - - # List comprehension -- returns list - stripped_list = [line.strip() for line in line_list] - -You can select only certain elements by adding an ``"if"`` condition:: - - stripped_list = [line.strip() for line in line_list - if line != ""] - -With a list comprehension, you get back a Python list; -``stripped_list`` is a list containing the resulting lines, not an -iterator. Generator expressions return an iterator that computes the -values as necessary, not needing to materialize all the values at -once. This means that list comprehensions aren't useful if you're -working with iterators that return an infinite stream or a very large -amount of data. Generator expressions are preferable in these -situations. - -Generator expressions are surrounded by parentheses ("()") and list -comprehensions are surrounded by square brackets ("[]"). Generator -expressions have the form:: - - ( expression for expr in sequence1 - if condition1 - for expr2 in sequence2 - if condition2 - for expr3 in sequence3 ... - if condition3 - for exprN in sequenceN - if conditionN ) - -Again, for a list comprehension only the outside brackets are -different (square brackets instead of parentheses). - -The elements of the generated output will be the successive values of -``expression``. The ``if`` clauses are all optional; if present, -``expression`` is only evaluated and added to the result when -``condition`` is true. - -Generator expressions always have to be written inside parentheses, -but the parentheses signalling a function call also count. 
If you -want to create an iterator that will be immediately passed to a -function you can write:: - - obj_total = sum(obj.count for obj in list_all_objects()) - -The ``for...in`` clauses contain the sequences to be iterated over. -The sequences do not have to be the same length, because they are -iterated over from left to right, **not** in parallel. For each -element in ``sequence1``, ``sequence2`` is looped over from the -beginning. ``sequence3`` is then looped over for each -resulting pair of elements from ``sequence1`` and ``sequence2``. - -To put it another way, a list comprehension or generator expression is -equivalent to the following Python code:: - - for expr1 in sequence1: - if not (condition1): - continue # Skip this element - for expr2 in sequence2: - if not (condition2): - continue # Skip this element - ... - for exprN in sequenceN: - if not (conditionN): - continue # Skip this element - - # Output the value of - # the expression. - -This means that when there are multiple ``for...in`` clauses but no -``if`` clauses, the length of the resulting output will be equal to -the product of the lengths of all the sequences. If you have two -lists of length 3, the output list is 9 elements long:: - - seq1 = 'abc' - seq2 = (1,2,3) - >>> [ (x,y) for x in seq1 for y in seq2] - [('a', 1), ('a', 2), ('a', 3), - ('b', 1), ('b', 2), ('b', 3), - ('c', 1), ('c', 2), ('c', 3)] - -To avoid introducing an ambiguity into Python's grammar, if -``expression`` is creating a tuple, it must be surrounded with -parentheses. The first list comprehension below is a syntax error, -while the second one is correct:: - - # Syntax error - [ x,y for x in seq1 for y in seq2] - # Correct - [ (x,y) for x in seq1 for y in seq2] - - -Generators ------------------------ - -Generators are a special class of functions that simplify the task of -writing iterators. Regular functions compute a value and return it, -but generators return an iterator that returns a stream of values. - -You're doubtless familiar with how regular function calls work in -Python or C. When you call a function, it gets a private namespace -where its local variables are created. When the function reaches a -``return`` statement, the local variables are destroyed and the -value is returned to the caller. A later call to the same function -creates a new private namespace and a fresh set of local -variables. But, what if the local variables weren't thrown away on -exiting a function? What if you could later resume the function where -it left off? This is what generators provide; they can be thought of -as resumable functions. - -Here's the simplest example of a generator function:: - - def generate_ints(N): - for i in range(N): - yield i - -Any function containing a ``yield`` keyword is a generator function; -this is detected by Python's bytecode compiler which compiles the -function specially as a result. - -When you call a generator function, it doesn't return a single value; -instead it returns a generator object that supports the iterator -protocol. On executing the ``yield`` expression, the generator -outputs the value of ``i``, similar to a ``return`` -statement. The big difference between ``yield`` and a -``return`` statement is that on reaching a ``yield`` the -generator's state of execution is suspended and local variables are -preserved. On the next call to the generator's ``.next()`` method, -the function will resume executing. 
- -Here's a sample usage of the ``generate_ints()`` generator:: - - >>> gen = generate_ints(3) - >>> gen - - >>> gen.next() - 0 - >>> gen.next() - 1 - >>> gen.next() - 2 - >>> gen.next() - Traceback (most recent call last): - File "stdin", line 1, in ? - File "stdin", line 2, in generate_ints - StopIteration - -You could equally write ``for i in generate_ints(5)``, or -``a,b,c = generate_ints(3)``. - -Inside a generator function, the ``return`` statement can only be used -without a value, and signals the end of the procession of values; -after executing a ``return`` the generator cannot return any further -values. ``return`` with a value, such as ``return 5``, is a syntax -error inside a generator function. The end of the generator's results -can also be indicated by raising ``StopIteration`` manually, or by -just letting the flow of execution fall off the bottom of the -function. - -You could achieve the effect of generators manually by writing your -own class and storing all the local variables of the generator as -instance variables. For example, returning a list of integers could -be done by setting ``self.count`` to 0, and having the -``next()`` method increment ``self.count`` and return it. -However, for a moderately complicated generator, writing a -corresponding class can be much messier. - -The test suite included with Python's library, ``test_generators.py``, -contains a number of more interesting examples. Here's one generator -that implements an in-order traversal of a tree using generators -recursively. - -:: - - # A recursive generator that generates Tree leaves in in-order. - def inorder(t): - if t: - for x in inorder(t.left): - yield x - - yield t.label - - for x in inorder(t.right): - yield x - -Two other examples in ``test_generators.py`` produce -solutions for the N-Queens problem (placing N queens on an NxN -chess board so that no queen threatens another) and the Knight's Tour -(finding a route that takes a knight to every square of an NxN chessboard -without visiting any square twice). - - - -Passing values into a generator -'''''''''''''''''''''''''''''''''''''''''''''' - -In Python 2.4 and earlier, generators only produced output. Once a -generator's code was invoked to create an iterator, there was no way to -pass any new information into the function when its execution is -resumed. You could hack together this ability by making the -generator look at a global variable or by passing in some mutable object -that callers then modify, but these approaches are messy. - -In Python 2.5 there's a simple way to pass values into a generator. -``yield`` became an expression, returning a value that can be assigned -to a variable or otherwise operated on:: - - val = (yield i) - -I recommend that you **always** put parentheses around a ``yield`` -expression when you're doing something with the returned value, as in -the above example. The parentheses aren't always necessary, but it's -easier to always add them instead of having to remember when they're -needed. - -(PEP 342 explains the exact rules, which are that a -``yield``-expression must always be parenthesized except when it -occurs at the top-level expression on the right-hand side of an -assignment. This means you can write ``val = yield i`` but have to -use parentheses when there's an operation, as in ``val = (yield i) -+ 12``.) - -Values are sent into a generator by calling its -``send(value)`` method. This method resumes the -generator's code and the ``yield`` expression returns the specified -value. 
If the regular ``next()`` method is called, the -``yield`` returns ``None``. - -Here's a simple counter that increments by 1 and allows changing the -value of the internal counter. - -:: - - def counter (maximum): - i = 0 - while i < maximum: - val = (yield i) - # If value provided, change counter - if val is not None: - i = val - else: - i += 1 - -And here's an example of changing the counter: - - >>> it = counter(10) - >>> print it.next() - 0 - >>> print it.next() - 1 - >>> print it.send(8) - 8 - >>> print it.next() - 9 - >>> print it.next() - Traceback (most recent call last): - File ``t.py'', line 15, in ? - print it.next() - StopIteration - -Because ``yield`` will often be returning ``None``, you -should always check for this case. Don't just use its value in -expressions unless you're sure that the ``send()`` method -will be the only method used resume your generator function. - -In addition to ``send()``, there are two other new methods on -generators: - -* ``throw(type, value=None, traceback=None)`` is used to raise an exception inside the - generator; the exception is raised by the ``yield`` expression - where the generator's execution is paused. - -* ``close()`` raises a ``GeneratorExit`` - exception inside the generator to terminate the iteration. - On receiving this - exception, the generator's code must either raise - ``GeneratorExit`` or ``StopIteration``; catching the - exception and doing anything else is illegal and will trigger - a ``RuntimeError``. ``close()`` will also be called by - Python's garbage collector when the generator is garbage-collected. - - If you need to run cleanup code when a ``GeneratorExit`` occurs, - I suggest using a ``try: ... finally:`` suite instead of - catching ``GeneratorExit``. - -The cumulative effect of these changes is to turn generators from -one-way producers of information into both producers and consumers. - -Generators also become **coroutines**, a more generalized form of -subroutines. Subroutines are entered at one point and exited at -another point (the top of the function, and a ``return`` -statement), but coroutines can be entered, exited, and resumed at -many different points (the ``yield`` statements). - - -Built-in functions ----------------------------------------------- - -Let's look in more detail at built-in functions often used with iterators. - -Two Python's built-in functions, ``map()`` and ``filter()``, are -somewhat obsolete; they duplicate the features of list comprehensions -but return actual lists instead of iterators. - -``map(f, iterA, iterB, ...)`` returns a list containing ``f(iterA[0], -iterB[0]), f(iterA[1], iterB[1]), f(iterA[2], iterB[2]), ...``. - -:: - - def upper(s): - return s.upper() - map(upper, ['sentence', 'fragment']) => - ['SENTENCE', 'FRAGMENT'] - - [upper(s) for s in ['sentence', 'fragment']] => - ['SENTENCE', 'FRAGMENT'] - -As shown above, you can achieve the same effect with a list -comprehension. The ``itertools.imap()`` function does the same thing -but can handle infinite iterators; it'll be discussed later, in the section on -the ``itertools`` module. - -``filter(predicate, iter)`` returns a list -that contains all the sequence elements that meet a certain condition, -and is similarly duplicated by list comprehensions. -A **predicate** is a function that returns the truth value of -some condition; for use with ``filter()``, the predicate must take a -single value. 
- -:: - - def is_even(x): - return (x % 2) == 0 - - filter(is_even, range(10)) => - [0, 2, 4, 6, 8] - -This can also be written as a list comprehension:: - - >>> [x for x in range(10) if is_even(x)] - [0, 2, 4, 6, 8] - -``filter()`` also has a counterpart in the ``itertools`` module, -``itertools.ifilter()``, that returns an iterator and -can therefore handle infinite sequences just as ``itertools.imap()`` can. - -``reduce(func, iter, [initial_value])`` doesn't have a counterpart in -the ``itertools`` module because it cumulatively performs an operation -on all the iterable's elements and therefore can't be applied to -infinite iterables. ``func`` must be a function that takes two elements -and returns a single value. ``reduce()`` takes the first two elements -A and B returned by the iterator and calculates ``func(A, B)``. It -then requests the third element, C, calculates ``func(func(A, B), -C)``, combines this result with the fourth element returned, and -continues until the iterable is exhausted. If the iterable returns no -values at all, a ``TypeError`` exception is raised. If the initial -value is supplied, it's used as a starting point and -``func(initial_value, A)`` is the first calculation. - -:: - - import operator - reduce(operator.concat, ['A', 'BB', 'C']) => - 'ABBC' - reduce(operator.concat, []) => - TypeError: reduce() of empty sequence with no initial value - reduce(operator.mul, [1,2,3], 1) => - 6 - reduce(operator.mul, [], 1) => - 1 - -If you use ``operator.add`` with ``reduce()``, you'll add up all the -elements of the iterable. This case is so common that there's a special -built-in called ``sum()`` to compute it:: - - reduce(operator.add, [1,2,3,4], 0) => - 10 - sum([1,2,3,4]) => - 10 - sum([]) => - 0 - -For many uses of ``reduce()``, though, it can be clearer to just write -the obvious ``for`` loop:: - - # Instead of: - product = reduce(operator.mul, [1,2,3], 1) - - # You can write: - product = 1 - for i in [1,2,3]: - product *= i - - -``enumerate(iter)`` counts off the elements in the iterable, returning -2-tuples containing the count and each element. - -:: - - enumerate(['subject', 'verb', 'object']) => - (0, 'subject'), (1, 'verb'), (2, 'object') - -``enumerate()`` is often used when looping through a list -and recording the indexes at which certain conditions are met:: - - f = open('data.txt', 'r') - for i, line in enumerate(f): - if line.strip() == '': - print 'Blank line at line #%i' % i - -``sorted(iterable, [cmp=None], [key=None], [reverse=False)`` -collects all the elements of the iterable into a list, sorts -the list, and returns the sorted result. The ``cmp``, ``key``, -and ``reverse`` arguments are passed through to the -constructed list's ``.sort()`` method. - -:: - - import random - # Generate 8 random numbers between [0, 10000) - rand_list = random.sample(range(10000), 8) - rand_list => - [769, 7953, 9828, 6431, 8442, 9878, 6213, 2207] - sorted(rand_list) => - [769, 2207, 6213, 6431, 7953, 8442, 9828, 9878] - sorted(rand_list, reverse=True) => - [9878, 9828, 8442, 7953, 6431, 6213, 2207, 769] - -(For a more detailed discussion of sorting, see the Sorting mini-HOWTO -in the Python wiki at http://wiki.python.org/moin/HowTo/Sorting.) - -The ``any(iter)`` and ``all(iter)`` built-ins look at -the truth values of an iterable's contents. 
``any()`` returns -True if any element in the iterable is a true value, and ``all()`` -returns True if all of the elements are true values:: - - any([0,1,0]) => - True - any([0,0,0]) => - False - any([1,1,1]) => - True - all([0,1,0]) => - False - all([0,0,0]) => - False - all([1,1,1]) => - True - - -Small functions and the lambda statement ----------------------------------------------- - -When writing functional-style programs, you'll often need little -functions that act as predicates or that combine elements in some way. - -If there's a Python built-in or a module function that's suitable, you -don't need to define a new function at all:: - - stripped_lines = [line.strip() for line in lines] - existing_files = filter(os.path.exists, file_list) - -If the function you need doesn't exist, you need to write it. One way -to write small functions is to use the ``lambda`` statement. ``lambda`` -takes a number of parameters and an expression combining these parameters, -and creates a small function that returns the value of the expression:: - - lowercase = lambda x: x.lower() - - print_assign = lambda name, value: name + '=' + str(value) - - adder = lambda x, y: x+y - -An alternative is to just use the ``def`` statement and define a -function in the usual way:: - - def lowercase(x): - return x.lower() - - def print_assign(name, value): - return name + '=' + str(value) - - def adder(x,y): - return x + y - -Which alternative is preferable? That's a style question; my usual -course is to avoid using ``lambda``. - -One reason for my preference is that ``lambda`` is quite limited in -the functions it can define. The result has to be computable as a -single expression, which means you can't have multiway -``if... elif... else`` comparisons or ``try... except`` statements. -If you try to do too much in a ``lambda`` statement, you'll end up -with an overly complicated expression that's hard to read. Quick, -what's the following code doing? - -:: - - total = reduce(lambda a, b: (0, a[1] + b[1]), items)[1] - -You can figure it out, but it takes time to disentangle the expression -to figure out what's going on. Using a short nested -``def`` statements makes things a little bit better:: - - def combine (a, b): - return 0, a[1] + b[1] - - total = reduce(combine, items)[1] - -But it would be best of all if I had simply used a ``for`` loop:: - - total = 0 - for a, b in items: - total += b - -Or the ``sum()`` built-in and a generator expression:: - - total = sum(b for a,b in items) - -Many uses of ``reduce()`` are clearer when written as ``for`` loops. - -Fredrik Lundh once suggested the following set of rules for refactoring -uses of ``lambda``: - -1) Write a lambda function. -2) Write a comment explaining what the heck that lambda does. -3) Study the comment for a while, and think of a name that captures - the essence of the comment. -4) Convert the lambda to a def statement, using that name. -5) Remove the comment. - -I really like these rules, but you're free to disagree that this -lambda-free style is better. - - -The itertools module ------------------------ - -The ``itertools`` module contains a number of commonly-used iterators -as well as functions for combining several iterators. This section -will introduce the module's contents by showing small examples. - -The module's functions fall into a few broad classes: - -* Functions that create a new iterator based on an existing iterator. -* Functions for treating an iterator's elements as function arguments. 
-* Functions for selecting portions of an iterator's output. -* A function for grouping an iterator's output. - -Creating new iterators -'''''''''''''''''''''' - -``itertools.count(n)`` returns an infinite stream of -integers, increasing by 1 each time. You can optionally supply the -starting number, which defaults to 0:: - - itertools.count() => - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ... - itertools.count(10) => - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, ... - -``itertools.cycle(iter)`` saves a copy of the contents of a provided -iterable and returns a new iterator that returns its elements from -first to last. The new iterator will repeat these elements infinitely. - -:: - - itertools.cycle([1,2,3,4,5]) => - 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, ... - -``itertools.repeat(elem, [n])`` returns the provided element ``n`` -times, or returns the element endlessly if ``n`` is not provided. - -:: - - itertools.repeat('abc') => - abc, abc, abc, abc, abc, abc, abc, abc, abc, abc, ... - itertools.repeat('abc', 5) => - abc, abc, abc, abc, abc - -``itertools.chain(iterA, iterB, ...)`` takes an arbitrary number of -iterables as input, and returns all the elements of the first -iterator, then all the elements of the second, and so on, until all of -the iterables have been exhausted. - -:: - - itertools.chain(['a', 'b', 'c'], (1, 2, 3)) => - a, b, c, 1, 2, 3 - -``itertools.izip(iterA, iterB, ...)`` takes one element from each iterable -and returns them in a tuple:: - - itertools.izip(['a', 'b', 'c'], (1, 2, 3)) => - ('a', 1), ('b', 2), ('c', 3) - -It's similiar to the built-in ``zip()`` function, but doesn't -construct an in-memory list and exhaust all the input iterators before -returning; instead tuples are constructed and returned only if they're -requested. (The technical term for this behaviour is -`lazy evaluation `__.) - -This iterator is intended to be used with iterables that are all of -the same length. If the iterables are of different lengths, the -resulting stream will be the same length as the shortest iterable. - -:: - - itertools.izip(['a', 'b'], (1, 2, 3)) => - ('a', 1), ('b', 2) - -You should avoid doing this, though, because an element may be taken -from the longer iterators and discarded. This means you can't go on -to use the iterators further because you risk skipping a discarded -element. - -``itertools.islice(iter, [start], stop, [step])`` returns a stream -that's a slice of the iterator. With a single ``stop`` argument, -it will return the first ``stop`` -elements. If you supply a starting index, you'll get ``stop-start`` -elements, and if you supply a value for ``step``, elements will be -skipped accordingly. Unlike Python's string and list slicing, you -can't use negative values for ``start``, ``stop``, or ``step``. - -:: - - itertools.islice(range(10), 8) => - 0, 1, 2, 3, 4, 5, 6, 7 - itertools.islice(range(10), 2, 8) => - 2, 3, 4, 5, 6, 7 - itertools.islice(range(10), 2, 8, 2) => - 2, 4, 6 - -``itertools.tee(iter, [n])`` replicates an iterator; it returns ``n`` -independent iterators that will all return the contents of the source -iterator. If you don't supply a value for ``n``, the default is 2. -Replicating iterators requires saving some of the contents of the source -iterator, so this can consume significant memory if the iterator is large -and one of the new iterators is consumed more than the others. - -:: - - itertools.tee( itertools.count() ) => - iterA, iterB - - where iterA -> - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ... - - and iterB -> - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ... 
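To see how these building blocks combine, here is a small sketch (an
illustrative addition for this section; it uses only the functions described
above and prints its results so it can be run directly under Python 2)::

    import itertools

    # Number the lines of a (possibly unbounded) stream of strings,
    # starting at 1, and keep only the first three results.
    lines = iter(['alpha', 'beta', 'gamma', 'delta'])
    numbered = itertools.izip(itertools.count(1), lines)
    print list(itertools.islice(numbered, 3))
    # => [(1, 'alpha'), (2, 'beta'), (3, 'gamma')]

    # chain() can then glue independent iterators back together.
    print list(itertools.chain('ab', itertools.repeat('-', 2)))
    # => ['a', 'b', '-', '-']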
- - -Calling functions on elements -''''''''''''''''''''''''''''' - -Two functions are used for calling other functions on the contents of an -iterable. - -``itertools.imap(f, iterA, iterB, ...)`` returns -a stream containing ``f(iterA[0], iterB[0]), f(iterA[1], iterB[1]), -f(iterA[2], iterB[2]), ...``:: - - itertools.imap(operator.add, [5, 6, 5], [1, 2, 3]) => - 6, 8, 8 - -The ``operator`` module contains a set of functions -corresponding to Python's operators. Some examples are -``operator.add(a, b)`` (adds two values), -``operator.ne(a, b)`` (same as ``a!=b``), -and -``operator.attrgetter('id')`` (returns a callable that -fetches the ``"id"`` attribute). - -``itertools.starmap(func, iter)`` assumes that the iterable will -return a stream of tuples, and calls ``f()`` using these tuples as the -arguments:: - - itertools.starmap(os.path.join, - [('/usr', 'bin', 'java'), ('/bin', 'python'), - ('/usr', 'bin', 'perl'),('/usr', 'bin', 'ruby')]) - => - /usr/bin/java, /bin/python, /usr/bin/perl, /usr/bin/ruby - - -Selecting elements -'''''''''''''''''' - -Another group of functions chooses a subset of an iterator's elements -based on a predicate. - -``itertools.ifilter(predicate, iter)`` returns all the elements for -which the predicate returns true:: - - def is_even(x): - return (x % 2) == 0 - - itertools.ifilter(is_even, itertools.count()) => - 0, 2, 4, 6, 8, 10, 12, 14, ... - -``itertools.ifilterfalse(predicate, iter)`` is the opposite, -returning all elements for which the predicate returns false:: - - itertools.ifilterfalse(is_even, itertools.count()) => - 1, 3, 5, 7, 9, 11, 13, 15, ... - -``itertools.takewhile(predicate, iter)`` returns elements for as long -as the predicate returns true. Once the predicate returns false, -the iterator will signal the end of its results. - -:: - - def less_than_10(x): - return (x < 10) - - itertools.takewhile(less_than_10, itertools.count()) => - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 - - itertools.takewhile(is_even, itertools.count()) => - 0 - -``itertools.dropwhile(predicate, iter)`` discards elements while the -predicate returns true, and then returns the rest of the iterable's -results. - -:: - - itertools.dropwhile(less_than_10, itertools.count()) => - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, ... - - itertools.dropwhile(is_even, itertools.count()) => - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ... - - -Grouping elements -''''''''''''''''' - -The last function I'll discuss, ``itertools.groupby(iter, -key_func=None)``, is the most complicated. ``key_func(elem)`` is a -function that can compute a key value for each element returned by the -iterable. If you don't supply a key function, the key is simply each -element itself. - -``groupby()`` collects all the consecutive elements from the -underlying iterable that have the same key value, and returns a stream -of 2-tuples containing a key value and an iterator for the elements -with that key. - -:: - - city_list = [('Decatur', 'AL'), ('Huntsville', 'AL'), ('Selma', 'AL'), - ('Anchorage', 'AK'), ('Nome', 'AK'), - ('Flagstaff', 'AZ'), ('Phoenix', 'AZ'), ('Tucson', 'AZ'), - ... - ] - - def get_state ((city, state)): - return state - - itertools.groupby(city_list, get_state) => - ('AL', iterator-1), - ('AK', iterator-2), - ('AZ', iterator-3), ... 
- - where - iterator-1 => - ('Decatur', 'AL'), ('Huntsville', 'AL'), ('Selma', 'AL') - iterator-2 => - ('Anchorage', 'AK'), ('Nome', 'AK') - iterator-3 => - ('Flagstaff', 'AZ'), ('Phoenix', 'AZ'), ('Tucson', 'AZ') - -``groupby()`` assumes that the underlying iterable's contents will -already be sorted based on the key. Note that the returned iterators -also use the underlying iterable, so you have to consume the results -of iterator-1 before requesting iterator-2 and its corresponding key. - - -The functools module ----------------------------------------------- - -The ``functools`` module in Python 2.5 contains some higher-order -functions. A **higher-order function** takes one or more functions as -input and returns a new function. The most useful tool in this module -is the ``partial()`` function. - -For programs written in a functional style, you'll sometimes want to -construct variants of existing functions that have some of the -parameters filled in. Consider a Python function ``f(a, b, c)``; you -may wish to create a new function ``g(b, c)`` that's equivalent to -``f(1, b, c)``; you're filling in a value for one of ``f()``'s parameters. -This is called "partial function application". - -The constructor for ``partial`` takes the arguments ``(function, arg1, -arg2, ... kwarg1=value1, kwarg2=value2)``. The resulting object is -callable, so you can just call it to invoke ``function`` with the -filled-in arguments. - -Here's a small but realistic example:: - - import functools - - def log (message, subsystem): - "Write the contents of 'message' to the specified subsystem." - print '%s: %s' % (subsystem, message) - ... - - server_log = functools.partial(log, subsystem='server') - server_log('Unable to open socket') - - -The operator module -------------------- - -The ``operator`` module was mentioned earlier. It contains a set of -functions corresponding to Python's operators. These functions -are often useful in functional-style code because they save you -from writing trivial functions that perform a single operation. - -Some of the functions in this module are: - -* Math operations: ``add()``, ``sub()``, ``mul()``, ``div()``, ``floordiv()``, - ``abs()``, ... -* Logical operations: ``not_()``, ``truth()``. -* Bitwise operations: ``and_()``, ``or_()``, ``invert()``. -* Comparisons: ``eq()``, ``ne()``, ``lt()``, ``le()``, ``gt()``, and ``ge()``. -* Object identity: ``is_()``, ``is_not()``. - -Consult `the operator module's documentation `__ for a complete -list. - - - -The functional module ---------------------- - -Collin Winter's `functional module `__ -provides a number of more -advanced tools for functional programming. It also reimplements -several Python built-ins, trying to make them more intuitive to those -used to functional programming in other languages. - -This section contains an introduction to some of the most important -functions in ``functional``; full documentation can be found at `the -project's website `__. - -``compose(outer, inner, unpack=False)`` - -The ``compose()`` function implements function composition. -In other words, it returns a wrapper around the ``outer`` and ``inner`` callables, such -that the return value from ``inner`` is fed directly to ``outer``. That is, - -:: - - >>> def add(a, b): - ... return a + b - ... - >>> def double(a): - ... return 2 * a - ... 
- >>> compose(double, add)(5, 6) - 22 - -is equivalent to - -:: - - >>> double(add(5, 6)) - 22 - -The ``unpack`` keyword is provided to work around the fact that Python functions are not always -`fully curried `__. -By default, it is expected that the ``inner`` function will return a single object and that the ``outer`` -function will take a single argument. Setting the ``unpack`` argument causes ``compose`` to expect a -tuple from ``inner`` which will be expanded before being passed to ``outer``. Put simply, - -:: - - compose(f, g)(5, 6) - -is equivalent to:: - - f(g(5, 6)) - -while - -:: - - compose(f, g, unpack=True)(5, 6) - -is equivalent to:: - - f(*g(5, 6)) - -Even though ``compose()`` only accepts two functions, it's trivial to -build up a version that will compose any number of functions. We'll -use ``reduce()``, ``compose()`` and ``partial()`` (the last of which -is provided by both ``functional`` and ``functools``). - -:: - - from functional import compose, partial - - multi_compose = partial(reduce, compose) - - -We can also use ``map()``, ``compose()`` and ``partial()`` to craft a -version of ``"".join(...)`` that converts its arguments to string:: - - from functional import compose, partial - - join = compose("".join, partial(map, str)) - - -``flip(func)`` - -``flip()`` wraps the callable in ``func`` and -causes it to receive its non-keyword arguments in reverse order. - -:: - - >>> def triple(a, b, c): - ... return (a, b, c) - ... - >>> triple(5, 6, 7) - (5, 6, 7) - >>> - >>> flipped_triple = flip(triple) - >>> flipped_triple(5, 6, 7) - (7, 6, 5) - -``foldl(func, start, iterable)`` - -``foldl()`` takes a binary function, a starting value (usually some kind of 'zero'), and an iterable. -The function is applied to the starting value and the first element of the list, then the result of -that and the second element of the list, then the result of that and the third element of the list, -and so on. - -This means that a call such as:: - - foldl(f, 0, [1, 2, 3]) - -is equivalent to:: - - f(f(f(0, 1), 2), 3) - - -``foldl()`` is roughly equivalent to the following recursive function:: - - def foldl(func, start, seq): - if len(seq) == 0: - return start - - return foldl(func, func(start, seq[0]), seq[1:]) - -Speaking of equivalence, the above ``foldl`` call can be expressed in terms of the built-in ``reduce`` like -so:: - - reduce(f, [1, 2, 3], 0) - - -We can use ``foldl()``, ``operator.concat()`` and ``partial()`` to -write a cleaner, more aesthetically-pleasing version of Python's -``"".join(...)`` idiom:: - - from functional import foldl, partial - from operator import concat - - join = partial(foldl, concat, "") - - -Revision History and Acknowledgements ------------------------------------------------- - -The author would like to thank the following people for offering -suggestions, corrections and assistance with various drafts of this -article: Ian Bicking, Nick Coghlan, Nick Efford, Raymond Hettinger, -Jim Jewett, Mike Krell, Leandro Lameiro, Jussi Salmela, -Collin Winter, Blake Winton. - -Version 0.1: posted June 30 2006. - -Version 0.11: posted July 1 2006. Typo fixes. - -Version 0.2: posted July 10 2006. Merged genexp and listcomp -sections into one. Typo fixes. - -Version 0.21: Added more references suggested on the tutor mailing list. - -Version 0.30: Adds a section on the ``functional`` module written by -Collin Winter; adds short section on the operator module; a few other -edits. 
- - -References --------------------- - -General -''''''''''''''' - -**Structure and Interpretation of Computer Programs**, by -Harold Abelson and Gerald Jay Sussman with Julie Sussman. -Full text at http://mitpress.mit.edu/sicp/. -In this classic textbook of computer science, chapters 2 and 3 discuss the -use of sequences and streams to organize the data flow inside a -program. The book uses Scheme for its examples, but many of the -design approaches described in these chapters are applicable to -functional-style Python code. - -http://www.defmacro.org/ramblings/fp.html: A general -introduction to functional programming that uses Java examples -and has a lengthy historical introduction. - -http://en.wikipedia.org/wiki/Functional_programming: -General Wikipedia entry describing functional programming. - -http://en.wikipedia.org/wiki/Coroutine: -Entry for coroutines. - -http://en.wikipedia.org/wiki/Currying: -Entry for the concept of currying. - -Python-specific -''''''''''''''''''''''''''' - -http://gnosis.cx/TPiP/: -The first chapter of David Mertz's book :title-reference:`Text Processing in Python` -discusses functional programming for text processing, in the section titled -"Utilizing Higher-Order Functions in Text Processing". - -Mertz also wrote a 3-part series of articles on functional programming -for IBM's DeveloperWorks site; see -`part 1 `__, -`part 2 `__, and -`part 3 `__, - - -Python documentation -''''''''''''''''''''''''''' - -http://docs.python.org/lib/module-itertools.html: -Documentation for the ``itertools`` module. - -http://docs.python.org/lib/module-operator.html: -Documentation for the ``operator`` module. - -http://www.python.org/dev/peps/pep-0289/: -PEP 289: "Generator Expressions" - -http://www.python.org/dev/peps/pep-0342/ -PEP 342: "Coroutines via Enhanced Generators" describes the new generator -features in Python 2.5. - -.. comment - - Topics to place - ----------------------------- - - XXX os.walk() - - XXX Need a large example. - - But will an example add much? I'll post a first draft and see - what the comments say. - -.. comment - - Original outline: - Introduction - Idea of FP - Programs built out of functions - Functions are strictly input-output, no internal state - Opposed to OO programming, where objects have state - - Why FP? - Formal provability - Assignment is difficult to reason about - Not very relevant to Python - Modularity - Small functions that do one thing - Debuggability: - Easy to test due to lack of state - Easy to verify output from intermediate steps - Composability - You assemble a toolbox of functions that can be mixed - - Tackling a problem - Need a significant example - - Iterators - Generators - The itertools module - List comprehensions - Small functions and the lambda statement - Built-in functions - map - filter - reduce - -.. comment - - Handy little function for printing part of an iterator -- used - while writing this document. - - import itertools - def print_iter(it): - slice = itertools.islice(it, 10) - for elem in slice[:-1]: - sys.stdout.write(str(elem)) - sys.stdout.write(', ') - print elem[-1] - - diff --git a/tests/examplefiles/garcia-wachs.kk b/tests/examplefiles/garcia-wachs.kk deleted file mode 100644 index 91a01fbe..00000000 --- a/tests/examplefiles/garcia-wachs.kk +++ /dev/null @@ -1,133 +0,0 @@ -// Koka language test module - -// This module implements the GarsiaWachs algorithm. 
-// It is an adaptation of the algorithm in ML as described by JeanChristophe Filli�tre: -// in ''A functional implementation of the GarsiaWachs algorithm. (functional pearl). ML workshop 2008, pages 91--96''. -// See: http://www.lri.fr/~filliatr/publis/gwWml08.pdf -// -// The algorithm is interesting since it uses mutable references shared between a list and tree but the -// side effects are not observable from outside. Koka automatically infers that the final algorithm is pure. -// Note: due to a current limitation in the divergence analysis, koka cannot yet infer that mutually recursive -// definitions in "insert" and "extract" are terminating and the final algorithm still has a divergence effect. -// However, koka does infer that no other effect (i.e. an exception due to a partial match) can occur. -module garcsiaWachs - -import test = qualified std/flags - -# pre processor test - -public function main() { - wlist = Cons1(('a',3), [('b',2),('c',1),('d',4),('e',5)]) - tree = wlist.garsiaWachs() - tree.show.println() -} - -//---------------------------------------------------- -// Trees -//---------------------------------------------------- -public type tree { - con Leaf(value :a) - con Node(left :tree, right :tree) -} - -function show( t : tree ) : string { - match(t) { - Leaf(c) -> core/show(c) - Node(l,r) -> "Node(" + show(l) + "," + show(r) + ")" - } -} - - -//---------------------------------------------------- -// Non empty lists -//---------------------------------------------------- -public type list1 { - Cons1( head : a, tail : list ) -} - -function map( xs, f ) { - val Cons1(y,ys) = xs - return Cons1(f(y), core/map(ys,f)) -} - -function zip( xs :list1, ys :list1 ) : list1<(a,b)> { - Cons1( (xs.head, ys.head), zip(xs.tail, ys.tail)) -} - - -//---------------------------------------------------- -// Phase 1 -//---------------------------------------------------- - -function insert( after : list<(tree,int)>, t : (tree,int), before : list<(tree,int)> ) : div tree -{ - match(before) { - Nil -> extract( [], Cons1(t,after) ) - Cons(x,xs) -> { - if (x.snd < t.snd) then return insert( Cons(x,after), t, xs ) - match(xs) { - Nil -> extract( [], Cons1(x,Cons(t,after)) ) - Cons(y,ys) -> extract( ys, Cons1(y,Cons(x,Cons(t,after))) ) - } - } - } -} - -function extract( before : list<(tree,int)>, after : list1<(tree,int)> ) : div tree -{ - val Cons1((t1,w1) as x, xs ) = after - match(xs) { - Nil -> t1 - Cons((t2,w2) as y, ys) -> match(ys) { - Nil -> insert( [], (Node(t1,t2), w1+w2), before ) - Cons((_,w3),_zs) -> - if (w1 <= w3) - then insert(ys, (Node(t1,t2), w1+w2), before) - else extract(Cons(x,before), Cons1(y,ys)) - } - } -} - -function balance( xs : list1<(tree,int)> ) : div tree { - extract( [], xs ) -} - -//---------------------------------------------------- -// Phase 2 -//---------------------------------------------------- - -function mark( depth :int, t :tree<(a,ref)> ) : > () { - match(t) { - Leaf((_,d)) -> d := depth - Node(l,r) -> { mark(depth+1,l); mark(depth+1,r) } - } -} - -function build( depth :int, xs :list1<(a,ref)> ) : ,div> (tree,list<(a,ref)>) -{ - if (!(xs.head.snd) == depth) return (Leaf(xs.head.fst), xs.tail) - - l = build(depth+1, xs) - match(l.snd) { - Nil -> (l.fst, Nil) - Cons(y,ys) -> { - r = build(depth+1, Cons1(y,ys)) - (Node(l.fst,r.fst), r.snd) - } - } -} - -//---------------------------------------------------- -// Main -//---------------------------------------------------- - -public function garsiaWachs( xs : list1<(a,int)> ) : div tree -{ - refs = 
xs.map(fst).map( fun(x) { (x, ref(0)) } ) - wleafs = zip( refs.map(Leaf), xs.map(snd) ) - - tree = balance(wleafs) - mark(0,tree) - build(0,refs).fst -} - diff --git a/tests/examplefiles/genclass.clj b/tests/examplefiles/genclass.clj deleted file mode 100644 index c63da8fd..00000000 --- a/tests/examplefiles/genclass.clj +++ /dev/null @@ -1,510 +0,0 @@ -; Copyright (c) Rich Hickey. All rights reserved. -; The use and distribution terms for this software are covered by the -; Common Public License 1.0 (http://opensource.org/licenses/cpl.php) -; which can be found in the file CPL.TXT at the root of this distribution. -; By using this software in any fashion, you are agreeing to be bound by -; the terms of this license. -; You must not remove this notice, or any other, from this software. - -(in-ns 'clojure) - -(import '(java.lang.reflect Modifier Constructor) - '(clojure.asm ClassWriter ClassVisitor Opcodes Type) - '(clojure.asm.commons Method GeneratorAdapter) - '(clojure.lang IPersistentMap)) - -;(defn method-sig [#^java.lang.reflect.Method meth] -; [(. meth (getName)) (seq (. meth (getParameterTypes)))]) - -(defn- non-private-methods [#^Class c] - (loop [mm {} - considered #{} - c c] - (if c - (let [[mm considered] - (loop [mm mm - considered considered - meths (concat - (seq (. c (getDeclaredMethods))) - (seq (. c (getMethods))))] - (if meths - (let [#^Method meth (first meths) - mods (. meth (getModifiers)) - mk (method-sig meth)] - (if (or (considered mk) - (. Modifier (isPrivate mods)) - (. Modifier (isStatic mods)) - (. Modifier (isFinal mods))) - (recur mm (conj considered mk) (rest meths)) - (recur (assoc mm mk meth) (conj considered mk) (rest meths)))) - [mm considered]))] - (recur mm considered (. c (getSuperclass)))) - mm))) - -(defn- ctor-sigs [super] - (for [#^Constructor ctor (. super (getDeclaredConstructors)) - :when (not (. Modifier (isPrivate (. ctor (getModifiers)))))] - (apply vector (. ctor (getParameterTypes))))) - -(defn- escape-class-name [c] - (.. (.getSimpleName c) - (replace "[]" "<>"))) - -(defn- overload-name [mname pclasses] - (if (seq pclasses) - (apply str mname (interleave (repeat \-) - (map escape-class-name pclasses))) - (str mname "-void"))) - -;(distinct (map first(keys (mapcat non-private-methods [Object IPersistentMap])))) - -(defn gen-class - "Generates compiled bytecode for a class with the given - package-qualified cname (which, as all names in these parameters, can - be a string or symbol). The gen-class construct contains no - implementation, as the implementation will be dynamically sought by - the generated class in functions in a corresponding Clojure - namespace. Given a generated class org.mydomain.MyClass, methods - will be implemented that look for same-named functions in a Clojure - namespace called org.domain.MyClass. The init and main - functions (see below) will be found similarly. The static - initializer for the generated class will attempt to load the Clojure - support code for the class as a resource from the claspath, e.g. in - the example case, org/mydomain/MyClass.clj - - Returns a map containing :name and :bytecode. Most uses will be - satisfied by the higher-level gen-and-load-class and - gen-and-store-class functions, which generate and immediately load, - or generate and store to disk, respectively. - - Options should be a set of key/value pairs, all of which are optional: - - :extends aclass - - Specifies the superclass, the non-private methods of which will be - overridden by the class. If not provided, defaults to Object. 
- - :implements [interface ...] - - One or more interfaces, the methods of which will be implemented by the class. - - :init name - - If supplied, names a function that will be called with the arguments - to the constructor. Must return [[superclass-constructor-args] state] - If not supplied, the constructor args are passed directly to - the superclass constructor and the state will be nil - - :constructors {[param-types] [super-param-types], ...} - - By default, constructors are created for the generated class which - match the signature(s) of the constructors for the superclass. This - parameter may be used to explicitly specify constructors, each entry - providing a mapping from a constructor signature to a superclass - constructor signature. When you supply this, you must supply an :init - specifier. - - :methods [[name [param-types] return-type], ...] - - The generated class automatically defines all of the non-private - methods of its superclasses/interfaces. This parameter can be used - to specify the signatures of additional methods of the generated - class. Do not repeat superclass/interface signatures here. - - :main boolean - - If supplied and true, a static public main function will be - generated. It will pass each string of the String[] argument as a - separate argument to a function called 'main. - - :factory name - - If supplied, a (set of) public static factory function(s) will be - created with the given name, and the same signature(s) as the - constructor(s). - - :state name - - If supplied, a public final instance field with the given name will be - created. You must supply an :init function in order to provide a - value for the state. Note that, though final, the state can be a ref - or agent, supporting the creation of Java objects with transactional - or asynchronous mutation semantics. - - :exposes {protected-field-name {:get name :set name}, ...} - - Since the implementations of the methods of the generated class - occur in Clojure functions, they have no access to the inherited - protected fields of the superclass. This parameter can be used to - generate public getter/setter methods exposing the protected field(s) - for use in the implementation." - - [cname & options] - (let [name (str cname) - {:keys [extends implements constructors methods main factory state init exposes]} (apply hash-map options) - super (or extends Object) - interfaces implements - supers (cons super (seq interfaces)) - ctor-sig-map (or constructors (zipmap (ctor-sigs super) (ctor-sigs super))) - cv (new ClassWriter (. ClassWriter COMPUTE_MAXS)) - cname (. name (replace "." "/")) - ctype (. Type (getObjectType cname)) - iname (fn [c] (.. Type (getType c) (getInternalName))) - totype (fn [c] (. Type (getType c))) - to-types (fn [cs] (if (pos? (count cs)) - (into-array (map totype cs)) - (make-array Type 0))) - obj-type (totype Object) - arg-types (fn [n] (if (pos? 
n) - (into-array (replicate n obj-type)) - (make-array Type 0))) - super-type (totype super) - init-name (str init) - factory-name (str factory) - state-name (str state) - main-name "main" - var-name (fn [s] (str s "__var")) - rt-type (totype clojure.lang.RT) - var-type (totype clojure.lang.Var) - ifn-type (totype clojure.lang.IFn) - iseq-type (totype clojure.lang.ISeq) - ex-type (totype java.lang.UnsupportedOperationException) - all-sigs (distinct (concat (map #(let[[m p] (key %)] {m [p]}) (mapcat non-private-methods supers)) - (map (fn [[m p]] {(str m) [p]}) methods))) - sigs-by-name (apply merge-with concat {} all-sigs) - overloads (into {} (filter (fn [[m s]] (rest s)) sigs-by-name)) - var-fields (concat (and init [init-name]) - (and main [main-name]) - (distinct (concat (keys sigs-by-name) - (mapcat (fn [[m s]] (map #(overload-name m %) s)) overloads) - (mapcat (comp (partial map str) vals val) exposes)))) - emit-get-var (fn [gen v] - (let [false-label (. gen newLabel) - end-label (. gen newLabel)] - (. gen getStatic ctype (var-name v) var-type) - (. gen dup) - (. gen invokeVirtual var-type (. Method (getMethod "boolean isBound()"))) - (. gen ifZCmp (. GeneratorAdapter EQ) false-label) - (. gen invokeVirtual var-type (. Method (getMethod "Object get()"))) - (. gen goTo end-label) - (. gen mark false-label) - (. gen pop) - (. gen visitInsn (. Opcodes ACONST_NULL)) - (. gen mark end-label))) - emit-forwarding-method - (fn [mname pclasses rclass else-gen] - (let [ptypes (to-types pclasses) - rtype (totype rclass) - m (new Method mname rtype ptypes) - is-overload (overloads mname) - gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv) - found-label (. gen (newLabel)) - else-label (. gen (newLabel)) - end-label (. gen (newLabel))] - (. gen (visitCode)) - (when is-overload - (emit-get-var gen (overload-name mname pclasses)) - (. gen (dup)) - (. gen (ifNonNull found-label)) - (. gen (pop))) - (emit-get-var gen mname) - (. gen (dup)) - (. gen (ifNull else-label)) - (when is-overload - (. gen (mark found-label))) - ;if found - (. gen (loadThis)) - ;box args - (dotimes i (count ptypes) - (. gen (loadArg i)) - (. clojure.lang.Compiler$HostExpr (emitBoxReturn nil gen (nth pclasses i)))) - ;call fn - (. gen (invokeInterface ifn-type (new Method "invoke" obj-type - (into-array (cons obj-type - (replicate (count ptypes) obj-type)))))) - ;unbox return - (. gen (unbox rtype)) - (when (= (. rtype (getSort)) (. Type VOID)) - (. gen (pop))) - (. gen (goTo end-label)) - - ;else call supplied alternative generator - (. gen (mark else-label)) - (. gen (pop)) - - (else-gen gen m) - - (. gen (mark end-label)) - (. gen (returnValue)) - (. gen (endMethod)))) - ] - ;start class definition - (. cv (visit (. Opcodes V1_5) (. Opcodes ACC_PUBLIC) - cname nil (iname super) - (when interfaces - (into-array (map iname interfaces))))) - - ;static fields for vars - (doseq v var-fields - (. cv (visitField (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_FINAL) (. Opcodes ACC_STATIC)) - (var-name v) - (. var-type getDescriptor) - nil nil))) - - ;instance field for state - (when state - (. cv (visitField (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_FINAL)) - state-name - (. obj-type getDescriptor) - nil nil))) - - ;static init to set up var fields and load clj - (let [gen (new GeneratorAdapter (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_STATIC)) - (. Method getMethod "void ()") - nil nil cv)] - (. gen (visitCode)) - (doseq v var-fields - (. gen push name) - (. gen push v) - (. gen (invokeStatic rt-type (. 
Method (getMethod "clojure.lang.Var var(String,String)")))) - (. gen putStatic ctype (var-name v) var-type)) - - (. gen push ctype) - (. gen push (str (. name replace \. (. java.io.File separatorChar)) ".clj")) - (. gen (invokeStatic rt-type (. Method (getMethod "void loadResourceScript(Class,String)")))) - - (. gen (returnValue)) - (. gen (endMethod))) - - ;ctors - (doseq [pclasses super-pclasses] ctor-sig-map - (let [ptypes (to-types pclasses) - super-ptypes (to-types super-pclasses) - m (new Method "" (. Type VOID_TYPE) ptypes) - super-m (new Method "" (. Type VOID_TYPE) super-ptypes) - gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv) - no-init-label (. gen newLabel) - end-label (. gen newLabel) - nth-method (. Method (getMethod "Object nth(Object,int)")) - local (. gen newLocal obj-type)] - (. gen (visitCode)) - - (if init - (do - (emit-get-var gen init-name) - (. gen dup) - (. gen ifNull no-init-label) - ;box init args - (dotimes i (count pclasses) - (. gen (loadArg i)) - (. clojure.lang.Compiler$HostExpr (emitBoxReturn nil gen (nth pclasses i)))) - ;call init fn - (. gen (invokeInterface ifn-type (new Method "invoke" obj-type - (arg-types (count ptypes))))) - ;expecting [[super-ctor-args] state] returned - (. gen dup) - (. gen push 0) - (. gen (invokeStatic rt-type nth-method)) - (. gen storeLocal local) - - (. gen (loadThis)) - (. gen dupX1) - (dotimes i (count super-pclasses) - (. gen loadLocal local) - (. gen push i) - (. gen (invokeStatic rt-type nth-method)) - (. clojure.lang.Compiler$HostExpr (emitUnboxArg nil gen (nth super-pclasses i)))) - (. gen (invokeConstructor super-type super-m)) - - (if state - (do - (. gen push 1) - (. gen (invokeStatic rt-type nth-method)) - (. gen (putField ctype state-name obj-type))) - (. gen pop)) - - (. gen goTo end-label) - ;no init found - (. gen mark no-init-label) - (. gen (throwException ex-type (str init-name " not defined"))) - (. gen mark end-label)) - (if (= pclasses super-pclasses) - (do - (. gen (loadThis)) - (. gen (loadArgs)) - (. gen (invokeConstructor super-type super-m))) - (throw (new Exception ":init not specified, but ctor and super ctor args differ")))) - - (. gen (returnValue)) - (. gen (endMethod)) - ;factory - (when factory - (let [fm (new Method factory-name ctype ptypes) - gen (new GeneratorAdapter (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_STATIC)) - fm nil nil cv)] - (. gen (visitCode)) - (. gen newInstance ctype) - (. gen dup) - (. gen (loadArgs)) - (. gen (invokeConstructor ctype m)) - (. gen (returnValue)) - (. gen (endMethod)))))) - - ;add methods matching supers', if no fn -> call super - (let [mm (non-private-methods super)] - (doseq #^java.lang.reflect.Method meth (vals mm) - (emit-forwarding-method (.getName meth) (.getParameterTypes meth) (.getReturnType meth) - (fn [gen m] - (. gen (loadThis)) - ;push args - (. gen (loadArgs)) - ;call super - (. gen (visitMethodInsn (. Opcodes INVOKESPECIAL) - (. super-type (getInternalName)) - (. m (getName)) - (. m (getDescriptor))))))) - ;add methods matching interfaces', if no fn -> throw - (doseq #^Class iface interfaces - (doseq #^java.lang.reflect.Method meth (. iface (getMethods)) - (when-not (contains? mm (method-sig meth)) - (emit-forwarding-method (.getName meth) (.getParameterTypes meth) (.getReturnType meth) - (fn [gen m] - (. gen (throwException ex-type (. m (getName))))))))) - ;extra methods - (doseq [mname pclasses rclass :as msig] methods - (emit-forwarding-method (str mname) pclasses rclass - (fn [gen m] - (. gen (throwException ex-type (. 
m (getName)))))))) - - ;main - (when main - (let [m (. Method getMethod "void main (String[])") - gen (new GeneratorAdapter (+ (. Opcodes ACC_PUBLIC) (. Opcodes ACC_STATIC)) - m nil nil cv) - no-main-label (. gen newLabel) - end-label (. gen newLabel)] - (. gen (visitCode)) - - (emit-get-var gen main-name) - (. gen dup) - (. gen ifNull no-main-label) - (. gen loadArgs) - (. gen (invokeStatic rt-type (. Method (getMethod "clojure.lang.ISeq seq(Object)")))) - (. gen (invokeInterface ifn-type (new Method "applyTo" obj-type - (into-array [iseq-type])))) - (. gen pop) - (. gen goTo end-label) - ;no main found - (. gen mark no-main-label) - (. gen (throwException ex-type (str main-name " not defined"))) - (. gen mark end-label) - (. gen (returnValue)) - (. gen (endMethod)))) - ;field exposers - (doseq [f {getter :get setter :set}] exposes - (let [fld (.getField super (str f)) - ftype (totype (.getType fld))] - (when getter - (let [m (new Method (str getter) ftype (to-types [])) - gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv)] - (. gen (visitCode)) - (. gen loadThis) - (. gen getField ctype (str f) ftype) - (. gen (returnValue)) - (. gen (endMethod)))) - (when setter - (let [m (new Method (str setter) (. Type VOID_TYPE) (into-array [ftype])) - gen (new GeneratorAdapter (. Opcodes ACC_PUBLIC) m nil nil cv)] - (. gen (visitCode)) - (. gen loadThis) - (. gen loadArgs) - (. gen putField ctype (str f) ftype) - (. gen (returnValue)) - (. gen (endMethod)))))) - ;finish class def - (. cv (visitEnd)) - {:name name :bytecode (. cv (toByteArray))})) - -(defn gen-and-load-class - "Generates and immediately loads the bytecode for the specified - class. Note that a class generated this way can be loaded only once - - the JVM supports only one class with a given name per - classloader. Subsequent to generation you can import it into any - desired namespaces just like any other class. See gen-class for a - description of the options." - - [name & options] - (let [{:keys [name bytecode]} - (apply gen-class (str name) options)] - (.. clojure.lang.RT ROOT_CLASSLOADER (defineClass (str name) bytecode)))) - -(defn gen-and-save-class - "Generates the bytecode for the named class and stores in a .class - file in a subpath of the supplied path, the directories for which - must already exist. See gen-class for a description of the options" - - [path name & options] - (let [{:keys [name bytecode]} (apply gen-class (str name) options) - file (java.io.File. path (str (. name replace \. (. java.io.File separatorChar)) ".class"))] - (.createNewFile file) - (with-open f (java.io.FileOutputStream. file) - (.write f bytecode)))) - -(comment -;usage -(gen-class - package-qualified-name - ;all below are optional - :extends aclass - :implements [interface ...] 
- :constructors {[param-types] [super-param-types], } - :methods [[name [param-types] return-type], ] - :main boolean - :factory name - :state name - :init name - :exposes {protected-field {:get name :set name}, }) - -;(gen-and-load-class -(clojure/gen-and-save-class - "/Users/rich/Downloads" - 'fred.lucy.Ethel - :extends clojure.lang.Box ;APersistentMap - :implements [clojure.lang.IPersistentMap] - :state 'state - ;:constructors {[Object] [Object]} - ;:init 'init - :main true - :factory 'create - :methods [['foo [Object] Object] - ['foo [] Object]] - :exposes {'val {:get 'getVal :set 'setVal}}) - -(in-ns 'fred.lucy.Ethel__2276) -(clojure/refer 'clojure :exclude '(assoc seq count cons)) -(defn init [n] [[] n]) -(defn foo - ([this] :foo) - ([this x] x)) -(defn main [x y] (println x y)) -(in-ns 'user) -(def ethel (new fred.lucy.Ethel__2276 42)) -(def ethel (fred.lucy.Ethel__2276.create 21)) -(fred.lucy.Ethel__2276.main (into-array ["lucy" "ricky"])) -(.state ethel) -(.foo ethel 7) -(.foo ethel) -(.getVal ethel) -(.setVal ethel 12) - -(gen-class org.clojure.MyComparator :implements [Comparator]) -(in-ns 'org.clojure.MyComparator) -(defn compare [this x y] ...) - -(load-file "/Users/rich/dev/clojure/src/genclass.clj") - -(clojure/gen-and-save-class "/Users/rich/dev/clojure/gen/" - 'org.clojure.ClojureServlet - :extends javax.servlet.http.HttpServlet) - -) diff --git a/tests/examplefiles/genshi_example.xml+genshi b/tests/examplefiles/genshi_example.xml+genshi deleted file mode 100644 index 8576b042..00000000 --- a/tests/examplefiles/genshi_example.xml+genshi +++ /dev/null @@ -1,193 +0,0 @@ - - - - - $title - - - - - - - - (${v or 'No'} match${v != 1 and 'es' or ''}) - - -
 $title ${num_matches(len(tickets))}
- Filters
- Note: See TracQuery - for help on using queries. -
- - diff --git a/tests/examplefiles/genshitext_example.genshitext b/tests/examplefiles/genshitext_example.genshitext deleted file mode 100644 index b82708d2..00000000 --- a/tests/examplefiles/genshitext_example.genshitext +++ /dev/null @@ -1,33 +0,0 @@ - ## a comment - -\## not a comment - -#if foo - ${bar} -#endif - -The answer is: -#choose - #when 0 == 1 - 0 - #end - #when 1 == 1 - 1 - #end - #otherwise - 2 - #end -#end -- comment about choose - -#for item in items - * ${item} -#end - -#def greeting(name) - Hello, ${name}! -#end -${greeting('world')} - -#with y=7; z=x+10 - $x $y $z -#end diff --git a/tests/examplefiles/glsl.frag b/tests/examplefiles/glsl.frag deleted file mode 100644 index 132b0353..00000000 --- a/tests/examplefiles/glsl.frag +++ /dev/null @@ -1,7 +0,0 @@ -/* Fragment shader */ -void main() -{ - gl_FragColor[0] = gl_FragCoord[0] / 400.0; - gl_FragColor[1] = gl_FragCoord[1] / 400.0; - gl_FragColor[2] = 1.0; -} diff --git a/tests/examplefiles/glsl.vert b/tests/examplefiles/glsl.vert deleted file mode 100644 index 23dc6a6b..00000000 --- a/tests/examplefiles/glsl.vert +++ /dev/null @@ -1,13 +0,0 @@ -/* Vertex shader */ -uniform float waveTime; -uniform float waveWidth; -uniform float waveHeight; - -void main(void) -{ - vec4 v = vec4(gl_Vertex); - - v.z = sin(waveWidth * v.x + waveTime) * cos(waveWidth * v.y + waveTime) * waveHeight; - - gl_Position = gl_ModelViewProjectionMatrix * v; -} diff --git a/tests/examplefiles/grammar-test.p6 b/tests/examplefiles/grammar-test.p6 deleted file mode 100644 index 28107f3e..00000000 --- a/tests/examplefiles/grammar-test.p6 +++ /dev/null @@ -1,22 +0,0 @@ -token pod_formatting_code { - $=<[A..Z]> - '<' { $*POD_IN_FORMATTINGCODE := 1 } - $=[ '> ]+ - '>' { $*POD_IN_FORMATTINGCODE := 0 } -} - -token pod_string { - + -} - -token something:sym«<» { - -} - -token name { - -} - -token comment:sym<#> { - '#' {} \N* -} diff --git a/tests/examplefiles/guidance.smv b/tests/examplefiles/guidance.smv deleted file mode 100644 index 671d1e1c..00000000 --- a/tests/examplefiles/guidance.smv +++ /dev/null @@ -1,1124 +0,0 @@ --- --- Shuttle Digital Autopilot --- by Sergey Berezin (berez@cs.cmu.edu) --- -MODULE cont_3eo_mode_select(start,smode5,vel,q_bar,apogee_alt_LT_alt_ref, - h_dot_LT_hdot_reg2,alpha_n_GRT_alpha_reg2, - delta_r_GRT_del_r_usp,v_horiz_dnrng_LT_0, - high_rate_sep,meco_confirmed) - -VAR cont_3EO_start: boolean; - RTLS_abort_declared: boolean; - region_selected : boolean; - m_mode: {mm102, mm103, mm601}; - r: {reg-1, reg0, reg1, reg2, reg3, reg102}; - step : {1,2,3,4,5,6,7,8,9,10, exit, undef}; - -ASSIGN - init(cont_3EO_start) := FALSE; - init(m_mode) := {mm102, mm103}; - init(region_selected) := FALSE; - init(RTLS_abort_declared) := FALSE; - init(r) := reg-1; - init(step) := undef; - - next(step) := - case - step = 1 & m_mode = mm102 : exit; - step = 1 : 2; - step = 2 & smode5 : 5; - step = 2 & vel = GRT_vi_3eo_max: exit; - step = 2 : 3; - step = 3 & vel = LEQ_vi_3eo_min : 6; - step = 3 : 4; - step = 4 & apogee_alt_LT_alt_ref: exit; - step = 4 : 6; - step = 5 : 6; - step = 6 & r = reg0 : exit; - step = 6 : 7; - step = 7 : 8; - step = 8 & q_bar = GRT_qbar_reg3 & !high_rate_sep : 10; - step = 8 : 9; - step = 9 : 10; - step = 10: exit; - next(start): 1; - step = exit : undef; - TRUE: step; - esac; - - next(cont_3EO_start) := - case - step = 1 & m_mode = mm102 : TRUE; - step = 10 & meco_confirmed : TRUE; - TRUE : cont_3EO_start; - esac; - - next(r) := - case - step = 1 & m_mode = mm102 : reg102; - step = 2 & !smode5 & vel = GRT_vi_3eo_max: reg0; - 
step = 4 & apogee_alt_LT_alt_ref: reg0; - step = 5 & v_horiz_dnrng_LT_0 & delta_r_GRT_del_r_usp : reg0; - step = 8 & q_bar = GRT_qbar_reg3 & !high_rate_sep : reg3; - step = 9: case - (h_dot_LT_hdot_reg2 & alpha_n_GRT_alpha_reg2 & - q_bar = GRT_qbar_reg1) | high_rate_sep : reg2; - TRUE : reg1; - esac; - next(step) = 1 : reg-1; - TRUE: r; - esac; - - next(RTLS_abort_declared) := - case - step = 10 & meco_confirmed & m_mode = mm103 : TRUE; - TRUE: RTLS_abort_declared; - esac; - - next(m_mode) := - case - step = 10 & meco_confirmed & m_mode = mm103 : mm601; - TRUE: m_mode; - esac; - - next(region_selected) := - case - next(step) = 1 : FALSE; - next(step) = exit : TRUE; - TRUE : region_selected; - esac; - -MODULE cont_3eo_guide(start,cont_3EO_start, mode_select_completed, et_sep_cmd, - h_dot_LT_0, q_bar_a_GRT_qbar_max_sep, m_mode, r0, - cont_minus_z_compl, t_nav-t_et_sep_GRT_dt_min_z_102, - ABS_q_orb_GRT_q_minus_z_max, ABS_r_orb_GRT_r_minus_z_max, - excess_OMS_propellant, q_bar_a_LT_qbar_oms_dump, - entry_mnvr_couter_LE_0, rcs_all_jet_inhibit, - alt_GRT_alt_min_102_dump, t_nav-t_gmtlo_LT_t_dmp_last, - pre_sep, cond_18, q_orb_LT_0, ABS_alf_err_LT_alf_sep_err, - cond_20b, cond_21, ABS_beta_n_GRT_beta_max, cond_24, cond_26, - cond_27, cond_29, mm602_OK) -VAR - step: {1,a1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20, - b20, c20, d20, 21,22,23,24,25,26,27,28,29,exit, undef}; - call_RTLS_abort_task : boolean; - first3: boolean; -- indicates if it is the first pass - first8: boolean; - first27: boolean; - s_unconv : boolean; - mode_2_indicator : boolean; - et_sep_man_initiate : boolean; - emerg_sep : boolean; - cont_3eo_pr_delay : {minus_z_reg1, minus_z_reg2, - minus_z_reg3, minus_z_reg4, minus_z_reg102, 0, 5}; - etsep_y_drift : {undef, minus_z_reg1, minus_z_reg2, - minus_z_reg3, minus_z_reg4, minus_z_reg102, 0}; - fwd_rcs_dump_enable : boolean; - fcs_accept_icnct : boolean; - oms_rcs_i_c_inh_ena_cmd : boolean; - orbiter_dump_ena : boolean; - frz_3eo : boolean; - high_rate_sep: boolean; - entry_gains : boolean; - cont_sep_cplt : boolean; - pch_cmd_reg4 : boolean; - alpha_ok : boolean; - r : {reg-1, reg0, reg1, reg2, reg3, reg4, reg102}; - early_sep : boolean; --------------------------------------------- ------ Additional Variables ----------------- --------------------------------------------- - rtls_lo_f_d_delay : {undef, 0}; - wcb2 : {undef, reg1_0, reg2_neg4, wcb2_3eo, reg4_0, - reg102_undef, post_sep_0}; - q_gcb_i : {undef, quat_reg1, quat_reg2, quat_reg3, quat_reg4, - quat_reg102_undef, quat_entry_M50_to_cmdbody}; - oms_nz_lim : {undef, oms_nz_lim_3eo, oms_nz_lim_iload, oms_nz_lim_std}; - contingency_nz_lim : {undef, contingency_nz_lim_3eo, - contingency_nz_lim_iload, contingency_nz_lim_std}; - - - -ASSIGN - init(entry_gains) := FALSE; - init(frz_3eo) := FALSE; - init(cont_3eo_pr_delay) := 5; - init(etsep_y_drift) := undef; - init(r) := reg-1; - init(step) := undef; - init(call_RTLS_abort_task) := FALSE; - init(first3) := TRUE; - init(first8) := TRUE; - init(first27) := TRUE; - init(cont_sep_cplt) := FALSE; - init(et_sep_man_initiate) := FALSE; - init(alpha_ok) := FALSE; - init(pch_cmd_reg4) := FALSE; - --- Assumed initializations: - - init(rtls_lo_f_d_delay) := undef; - init(wcb2) := undef; - init(q_gcb_i) := undef; - init(oms_nz_lim) := undef; - init(contingency_nz_lim) := undef; - init(oms_rcs_i_c_inh_ena_cmd) := FALSE; - init(orbiter_dump_ena) := FALSE; --- init(early_sep) := FALSE; - -------------- - - next(step) := nextstep; - - next(r) := - case - step = a1 & (cont_3EO_start | 
mode_select_completed) : r0; - step = 21 & cond_21 : reg4; - step = 23 & ABS_beta_n_GRT_beta_max & !high_rate_sep : reg1; - TRUE : r; - esac; - - next(first3) := - case - step = 3 & cont_3EO_start : FALSE; - TRUE : first3; - esac; - - next(first8) := - case - step = 8 & excess_OMS_propellant & cont_3EO_start : FALSE; - TRUE : first8; - esac; - - next(first27) := - case - step = 27 : FALSE; - TRUE: first27; - esac; - - next(s_unconv) := - case - step = 3 : FALSE; - TRUE : s_unconv; - esac; - - next(call_RTLS_abort_task) := - case - step = 3 : TRUE; - TRUE : call_RTLS_abort_task; - esac; - - next(mode_2_indicator) := - case - step = 4 : TRUE; - TRUE : mode_2_indicator; - esac; - - next(et_sep_man_initiate) := - case - step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102 : TRUE; - step = 14 & pre_sep : TRUE; - step = 19 & q_orb_LT_0 : TRUE; - step = d20 : TRUE; - step = 26 & cond_26 : TRUE; - step = 29 & cond_29 : TRUE; - TRUE : et_sep_man_initiate; - esac; - - next(emerg_sep) := - case - next(step) = 1 : FALSE; - step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102: TRUE; - TRUE : emerg_sep; - esac; - - next(cont_3eo_pr_delay) := - case - next(step) = 1 : 5; - step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102 : - minus_z_reg3; - step = 7 & !cont_minus_z_compl & r = reg102 & - t_nav-t_et_sep_GRT_dt_min_z_102 & - (ABS_q_orb_GRT_q_minus_z_max | ABS_r_orb_GRT_r_minus_z_max) : 0; - step = 14 & pre_sep : minus_z_reg102; - step = 19 & q_orb_LT_0 : minus_z_reg4; - step = d20 : minus_z_reg3; - step = 26 & cond_26 : minus_z_reg2; - step = 27 & first27 : minus_z_reg1; - TRUE : cont_3eo_pr_delay; - esac; - - next(etsep_y_drift) := - case - step = 5 & h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & m_mode != mm102 : - minus_z_reg3; - step = 7 & !cont_minus_z_compl & r = reg102 & - t_nav-t_et_sep_GRT_dt_min_z_102 & - (ABS_q_orb_GRT_q_minus_z_max | ABS_r_orb_GRT_r_minus_z_max) : 0; - step = 14 & pre_sep : minus_z_reg102; - step = 19 & q_orb_LT_0 : minus_z_reg4; - step = d20 : minus_z_reg3; - step = 26 & cond_26 : minus_z_reg2; - step = 27 & first27 : minus_z_reg1; - TRUE : etsep_y_drift; - esac; - - next(fwd_rcs_dump_enable) := - case - step = 8 & excess_OMS_propellant & first8 : FALSE; - TRUE : fwd_rcs_dump_enable; - esac; - - next(fcs_accept_icnct) := - case - step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : TRUE; - TRUE : fcs_accept_icnct; - esac; - - next(oms_rcs_i_c_inh_ena_cmd) := - case --- next(step) = 1 & oms_rcs_i_c_inh_ena_cmd : {0,1}; - next(step) = 1 & oms_rcs_i_c_inh_ena_cmd : FALSE; -- Assumed initialization - step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : TRUE; - TRUE : oms_rcs_i_c_inh_ena_cmd; - esac; - - next(orbiter_dump_ena) := - case - next(start) = TRUE : FALSE; -- Assumed initialization - step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : TRUE; - step = 13 & alt_GRT_alt_min_102_dump & t_nav-t_gmtlo_LT_t_dmp_last : TRUE; - TRUE : orbiter_dump_ena; - esac; - - next(frz_3eo) := - case - next(step) = 1 : FALSE; - step = 10 & entry_mnvr_couter_LE_0 & !rcs_all_jet_inhibit : FALSE; - step = 28 & !et_sep_man_initiate : TRUE; - TRUE : frz_3eo; - esac; - - next(high_rate_sep) := - case - step = 10 & entry_mnvr_couter_LE_0 & !rcs_all_jet_inhibit : FALSE; - step = 25 : TRUE; - TRUE : high_rate_sep; - esac; - - next(entry_gains) := - case - next(step) = 1 : FALSE; - step = 10 & entry_mnvr_couter_LE_0 & !rcs_all_jet_inhibit : TRUE; - TRUE : entry_gains; - esac; - - next(cont_sep_cplt) := - case - next(step) = 1 : FALSE; - step = 12 & mm602_OK : TRUE; - 
TRUE : cont_sep_cplt; - esac; - - next(pch_cmd_reg4) := - case - next(step) = 1 : FALSE; - step = 18 & !pch_cmd_reg4 & cond_18 : TRUE; - TRUE : pch_cmd_reg4; - esac; - - next(alpha_ok) := - case - next(step) = 1 : FALSE; - step = 20 & ABS_alf_err_LT_alf_sep_err : TRUE; - TRUE : alpha_ok; - esac; - - next(early_sep) := - case - step = 27 & first27 : - case - cond_27 : TRUE; - TRUE : FALSE; - esac; - TRUE : early_sep; - esac; - --------------------------------------------- ------ Additional Variables ----------------- --------------------------------------------- - - next(rtls_lo_f_d_delay) := - case - next(start) = TRUE : undef; -- Assumed initialization - step = 8 & first8 & excess_OMS_propellant : 0; - TRUE : rtls_lo_f_d_delay; - esac; - - next(wcb2) := - case - next(start) = TRUE : undef; -- Assumed initialization - step = 10 & entry_mnvr_couter_LE_0 : post_sep_0; - step = 12 : case - r = reg4 : reg4_0; - TRUE : wcb2_3eo; - esac; - step = 14 & pre_sep : reg102_undef; - step = 15 : case - r = reg4 : reg4_0; - TRUE : wcb2_3eo; - esac; - step = 25 : reg2_neg4; - TRUE : wcb2; - esac; - - next(q_gcb_i) := - case - next(start) = TRUE : undef; -- Assumed initialization - step = 11 : quat_entry_M50_to_cmdbody; - step = 14 & pre_sep : quat_reg102_undef; - step = 16 : case - r = reg4 : quat_reg4; - TRUE : quat_reg3; - esac; - step = 22 : quat_reg2; - --- Without this step the value "quat_reg2" would remain in "reg1": --- step = 23 & ABS_beta_n_GRT_beta_max & !high_rate_sep : undef; - - TRUE : q_gcb_i; - esac; - - next(oms_nz_lim) := - case - next(start) = TRUE : undef; -- Assumed initialization - step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : oms_nz_lim_3eo; - step = 12 & mm602_OK : oms_nz_lim_std; - TRUE : oms_nz_lim; - esac; - - next(contingency_nz_lim) := - case - next(start) = TRUE : undef; -- Assumed initialization - step = 9 & q_bar_a_LT_qbar_oms_dump & r != reg102 : - contingency_nz_lim_3eo; - step = 12 & mm602_OK : contingency_nz_lim_std; - TRUE : contingency_nz_lim; - esac; - -DEFINE - finished := step = exit; - idle := step = undef; - - start_cont_3eo_mode_select := - case - step = 1 & !cont_3EO_start : TRUE; - TRUE : FALSE; - esac; - - nextstep := - case - step = 1 : a1; - step = a1 : case - (cont_3EO_start | mode_select_completed) : 2; - TRUE : step; - esac; - step = 2 : case - !cont_3EO_start : exit; - first3 : 3; - TRUE: 4; - esac; - step = 3 : 4; - step = 4 : case - et_sep_cmd : 7; - TRUE : 5; - esac; - step = 5 : case - h_dot_LT_0 & q_bar_a_GRT_qbar_max_sep & - m_mode != mm102 : exit; - TRUE : 6; - esac; - step = 6 : - case - r = reg102 : 13; - r in {reg3, reg4} : 15; - r = reg2 : 22; - r = reg1 : 27; - TRUE : exit; - esac; - step = 7 : case - cont_minus_z_compl : 8; - TRUE : exit; - esac; - step = 8 : case - excess_OMS_propellant & first8 : 9; - TRUE : 10; - esac; - step = 9 : exit; - step = 10 : case - !entry_mnvr_couter_LE_0 | rcs_all_jet_inhibit : exit; - TRUE : 11; - esac; - step = 11 : 12; - step = 12 : exit; - step = 13 : 14; - step = 14 : exit; - step = 15 : 16; - step = 16 : 17; - step = 17 : case - r = reg4 : 18; - TRUE : 20; - esac; - step = 18 : case - pch_cmd_reg4 | cond_18 : 19; - TRUE : exit; - esac; - step = 19 : exit; - step = 20 : case - ABS_alf_err_LT_alf_sep_err : b20; - TRUE : c20; - esac; - step = b20 : case - cond_20b : d20; - TRUE : exit; - esac; - step = c20 : case - alpha_ok : d20; - TRUE : 21; - esac; - step = d20 : exit; - TRUE : nextstep21; - esac; - - nextstep21 := - case - step = 21 : case - cond_21 : 15; - TRUE : exit; - esac; - step = 22 : 23; 
- step = 23 : case - ABS_beta_n_GRT_beta_max & !high_rate_sep : 27; - TRUE : 24; - esac; - step = 24 : case - cond_24 | high_rate_sep : 25; - TRUE : exit; - esac; - step = 25 : 26; - step = 26 : exit; - step = 27 : 28; - step = 28 : case - !et_sep_man_initiate : 29; - TRUE : exit; - esac; - step = 29 : exit; - start : 1; - step = exit : undef; - TRUE : step; - esac; - - post_sep_mode := step in {7,8,9,10,11,12}; - ------------------------------------------------------------------- ------------------------------------------------------------------- - -MODULE main -VAR - smode5: boolean; - vel : {GRT_vi_3eo_max, GRT_vi_3eo_min, LEQ_vi_3eo_min}; - q_bar: {GRT_qbar_reg3, GRT_qbar_reg1, LEQ_qbar_reg1}; - q_bar_a_GRT_qbar_max_sep : boolean; - q_bar_a_LT_qbar_oms_dump : boolean; - apogee_alt_LT_alt_ref : boolean; - h_dot_LT_hdot_reg2 : boolean; - h_dot_LT_0 : boolean; - alpha_n_GRT_alpha_reg2 : boolean; - delta_r_GRT_del_r_usp : boolean; - v_horiz_dnrng_LT_0: boolean; - meco_confirmed: boolean; - et_sep_cmd : boolean; - cont_minus_z_compl : boolean; - t_nav-t_et_sep_GRT_dt_min_z_102 : boolean; - ABS_q_orb_GRT_q_minus_z_max : boolean; - ABS_r_orb_GRT_r_minus_z_max : boolean; - excess_OMS_propellant : boolean; - entry_mnvr_couter_LE_0 : boolean; - rcs_all_jet_inhibit : boolean; - alt_GRT_alt_min_102_dump : boolean; - t_nav-t_gmtlo_LT_t_dmp_last : boolean; - pre_sep : boolean; - cond_18 : boolean; - q_orb_LT_0 : boolean; - ABS_alf_err_LT_alf_sep_err : boolean; - cond_20b : boolean; - cond_21 : boolean; - ABS_beta_n_GRT_beta_max : boolean; - cond_24 : boolean; - cond_26 : boolean; - cond_27 : boolean; - cond_29 : boolean; - mm602_OK : boolean; - start_guide : boolean; - mated_coast_mnvr : boolean; - - cs: cont_3eo_mode_select(cg.start_cont_3eo_mode_select, - smode5,vel,q_bar,apogee_alt_LT_alt_ref, - h_dot_LT_hdot_reg2,alpha_n_GRT_alpha_reg2, - delta_r_GRT_del_r_usp,v_horiz_dnrng_LT_0, - cg.high_rate_sep,meco_confirmed); - - cg: cont_3eo_guide(start_guide, - cs.cont_3EO_start, cs.region_selected, et_sep_cmd, - h_dot_LT_0, q_bar_a_GRT_qbar_max_sep, cs.m_mode, cs.r, - cont_minus_z_compl, t_nav-t_et_sep_GRT_dt_min_z_102, - ABS_q_orb_GRT_q_minus_z_max, ABS_r_orb_GRT_r_minus_z_max, - excess_OMS_propellant, q_bar_a_LT_qbar_oms_dump, - entry_mnvr_couter_LE_0, rcs_all_jet_inhibit, - alt_GRT_alt_min_102_dump, t_nav-t_gmtlo_LT_t_dmp_last, - pre_sep, cond_18, q_orb_LT_0, ABS_alf_err_LT_alf_sep_err, - cond_20b, cond_21, ABS_beta_n_GRT_beta_max, cond_24, cond_26, - cond_27, cond_29, mm602_OK); - -ASSIGN - init(start_guide) := FALSE; - init(mated_coast_mnvr) := FALSE; - - next(entry_mnvr_couter_LE_0) := - case - !entry_mnvr_couter_LE_0 : {FALSE, TRUE}; - TRUE : TRUE; - esac; - ---------------------------------------------------------------------- ---------------------------------------------------------------------- - next(start_guide) := - case - start_guide : FALSE; - !cg.idle : FALSE; - TRUE : {FALSE, TRUE}; - esac; - - next(smode5) := - case - fixed_values : smode5; - cg.idle : { FALSE, TRUE }; - TRUE : smode5; - esac; - - next(vel) := - case - fixed_values : vel; - cg.idle : {GRT_vi_3eo_max, GRT_vi_3eo_min, LEQ_vi_3eo_min}; - TRUE : vel; - esac; - - next(q_bar) := - case - fixed_values : q_bar; - cg.idle : {GRT_qbar_reg3, GRT_qbar_reg1, LEQ_qbar_reg1}; - TRUE : q_bar; - esac; - - next(q_bar_a_GRT_qbar_max_sep) := - case - fixed_values : q_bar_a_GRT_qbar_max_sep; - cg.idle : { FALSE, TRUE }; - TRUE : q_bar_a_GRT_qbar_max_sep; - esac; - - next(apogee_alt_LT_alt_ref) := - case - fixed_values : 
apogee_alt_LT_alt_ref; - cg.idle : { FALSE, TRUE }; - TRUE : apogee_alt_LT_alt_ref; - esac; - - next(h_dot_LT_hdot_reg2) := - case - fixed_values : h_dot_LT_hdot_reg2; - cg.idle : { FALSE, TRUE }; - TRUE : h_dot_LT_hdot_reg2; - esac; - - next(h_dot_LT_0) := - case - fixed_values : h_dot_LT_0; - cg.idle : { FALSE, TRUE }; - TRUE : h_dot_LT_0; - esac; - - next(alpha_n_GRT_alpha_reg2) := - case - fixed_values : alpha_n_GRT_alpha_reg2; - cg.idle : { FALSE, TRUE }; - TRUE : alpha_n_GRT_alpha_reg2; - esac; - - next(delta_r_GRT_del_r_usp) := - case - fixed_values : delta_r_GRT_del_r_usp; - cg.idle : { FALSE, TRUE }; - TRUE : delta_r_GRT_del_r_usp; - esac; - - next(v_horiz_dnrng_LT_0) := - case - fixed_values : v_horiz_dnrng_LT_0; - cg.idle : { FALSE, TRUE }; - TRUE : v_horiz_dnrng_LT_0; - esac; - - next(meco_confirmed) := - case - fixed_values : meco_confirmed; - meco_confirmed : TRUE; - cg.idle : { FALSE, TRUE }; - TRUE : meco_confirmed; - esac; - - next(et_sep_cmd) := - case - fixed_values : et_sep_cmd; - et_sep_cmd : TRUE; - cg.idle : { FALSE, TRUE }; - TRUE : et_sep_cmd; - esac; - - next(cont_minus_z_compl) := - case - fixed_values : cont_minus_z_compl; - cg.idle : { FALSE, TRUE }; - TRUE : cont_minus_z_compl; - esac; - - next(t_nav-t_et_sep_GRT_dt_min_z_102) := - case - fixed_values : t_nav-t_et_sep_GRT_dt_min_z_102; - cg.idle : { FALSE, TRUE }; - TRUE : t_nav-t_et_sep_GRT_dt_min_z_102; - esac; - - next(ABS_q_orb_GRT_q_minus_z_max) := - case - fixed_values : ABS_q_orb_GRT_q_minus_z_max; - cg.idle : { FALSE, TRUE }; - TRUE : ABS_q_orb_GRT_q_minus_z_max; - esac; - - next(ABS_r_orb_GRT_r_minus_z_max) := - case - fixed_values : ABS_r_orb_GRT_r_minus_z_max; - cg.idle : { FALSE, TRUE }; - TRUE : ABS_r_orb_GRT_r_minus_z_max; - esac; - - next(excess_OMS_propellant) := - case - fixed_values : excess_OMS_propellant; - cg.idle & excess_OMS_propellant : { FALSE, TRUE }; - TRUE : excess_OMS_propellant; - esac; - - next(q_bar_a_LT_qbar_oms_dump) := - case - fixed_values : q_bar_a_LT_qbar_oms_dump; - cg.idle : { FALSE, TRUE }; - TRUE : q_bar_a_LT_qbar_oms_dump; - esac; - - next(rcs_all_jet_inhibit) := - case - fixed_values : rcs_all_jet_inhibit; - cg.idle : { FALSE, TRUE }; - TRUE : rcs_all_jet_inhibit; - esac; - - next(alt_GRT_alt_min_102_dump) := - case - fixed_values : alt_GRT_alt_min_102_dump; - cg.idle : { FALSE, TRUE }; - TRUE : alt_GRT_alt_min_102_dump; - esac; - - next(t_nav-t_gmtlo_LT_t_dmp_last) := - case - fixed_values : t_nav-t_gmtlo_LT_t_dmp_last; - cg.idle : { FALSE, TRUE }; - TRUE : t_nav-t_gmtlo_LT_t_dmp_last; - esac; - - next(pre_sep) := - case - fixed_values : pre_sep; - cg.idle : { FALSE, TRUE }; - TRUE : pre_sep; - esac; - - next(cond_18) := - case - fixed_values : cond_18; - cg.idle : { FALSE, TRUE }; - TRUE : cond_18; - esac; - - next(q_orb_LT_0) := - case - fixed_values : q_orb_LT_0; - cg.idle : { FALSE, TRUE }; - TRUE : q_orb_LT_0; - esac; - - next(ABS_alf_err_LT_alf_sep_err) := - case - fixed_values : ABS_alf_err_LT_alf_sep_err; - cg.idle : { FALSE, TRUE }; - TRUE : ABS_alf_err_LT_alf_sep_err; - esac; - - next(cond_20b) := - case - fixed_values : cond_20b; - cg.idle : { FALSE, TRUE }; - TRUE : cond_20b; - esac; - - next(cond_21) := - case - fixed_values : cond_21; - cg.idle : { FALSE, TRUE }; - TRUE : cond_21; - esac; - - next(ABS_beta_n_GRT_beta_max) := - case - fixed_values : ABS_beta_n_GRT_beta_max; - cg.idle : { FALSE, TRUE }; - TRUE : ABS_beta_n_GRT_beta_max; - esac; - - next(cond_24) := - case - fixed_values : cond_24; - cg.idle : { FALSE, TRUE }; - TRUE : cond_24; - esac; - 
- next(cond_26) := - case - fixed_values : cond_26; - cg.idle : { FALSE, TRUE }; - TRUE : cond_26; - esac; - - next(cond_27) := - case - fixed_values : cond_27; - cg.idle : { FALSE, TRUE }; - TRUE : cond_27; - esac; - - next(cond_29) := - case - fixed_values : cond_29; - cg.idle : { FALSE, TRUE }; - TRUE : cond_29; - esac; - - next(mm602_OK) := - case - fixed_values : mm602_OK; - cg.idle : { FALSE, TRUE }; - TRUE : mm602_OK; - esac; - - next(mated_coast_mnvr) := - case - next(cg.step) = 1 : FALSE; - cg.step = 6 & cg.r in {reg1, reg2, reg3, reg4, reg102} : TRUE; - TRUE : mated_coast_mnvr; - esac; - ---------------------------------------------------------------------- ---------------------------------------------------------------------- -DEFINE - fixed_values := FALSE; - - output_ok := - case - cg.q_gcb_i = undef | cg.wcb2 = undef | - cg.cont_3eo_pr_delay = 5 | - cg.etsep_y_drift = undef : - case - !mated_coast_mnvr: 1; - TRUE : undef; - esac; - !mated_coast_mnvr: toint(cg.q_gcb_i = quat_entry_M50_to_cmdbody & - cg.wcb2 = post_sep_0); --- reg1 never happens? --- cg.r = reg1 : (cg.q_gcb_i = quat_reg1 & cg.wcb2 = reg1_0 & --- cg.cont_3eo_pr_delay = minus_z_reg1 & --- cg.etsep_y_drift = minus_z_reg1) | cg.emerg_sep; - cg.r = reg2 : toint((cg.q_gcb_i = quat_reg2 & cg.wcb2 = reg2_neg4 & - cg.cont_3eo_pr_delay = minus_z_reg2 & - cg.etsep_y_drift = minus_z_reg2) | cg.emerg_sep); - - cg.r = reg3 : toint((cg.q_gcb_i = quat_reg3 & cg.wcb2 = wcb2_3eo & - cg.cont_3eo_pr_delay = minus_z_reg3 & - cg.etsep_y_drift = minus_z_reg3) | cg.emerg_sep); - cg.r = reg4 : toint((cg.q_gcb_i = quat_reg4 & cg.wcb2 = reg4_0 & - cg.cont_3eo_pr_delay = minus_z_reg4 & - cg.etsep_y_drift = minus_z_reg4) | cg.emerg_sep); - cg.r = reg102 : toint((cg.q_gcb_i = quat_reg102_undef & - cg.wcb2 = reg102_undef & - cg.cont_3eo_pr_delay = minus_z_reg102 & - cg.etsep_y_drift = minus_z_reg102) | cg.emerg_sep); - TRUE : 0; - esac; - ---------------------------------------------------------------------- --------- Specifications --------------------------------------------- ---------------------------------------------------------------------- - --- Contingency Guide terminates - -SPEC AG(!cg.idle -> AF(cg.finished)) - --- Contingency guide can be executed infinitely often - -SPEC AG( (cg.idle | cg.finished) -> - EF(!(cg.idle | cg.finished) & EF(cg.finished))) - --- Contingency mode select task works fine - -SPEC AG(cs.cont_3EO_start & cs.region_selected -> - ((cs.m_mode = mm102 | meco_confirmed) & - cs.r != reg-1 & cs.r != reg0)) - --- Bad (initial) value never happens again once region is computed --- unless we restart the task - ---SPEC AG(cs.r != reg-1 -> !E[!cg.start_cont_3eo_mode_select U --- cs.r = reg-1 & !cg.start_cont_3eo_mode_select]) - --- Comment out each of the regions and see if this is still true --- (Check, if ALL of the regions can happen) - ---SPEC AG(cs.r in {reg-1 --- ,reg0 --- ,reg1 --- ,reg2 --- ,reg3 --- ,reg102 --- }) - --- Comment out each of the regions and see if this is still true --- (Check, if ALL of the regions can happen) - ---SPEC AG(cg.r in {reg-1 --- ,reg0 --- ,reg1 --- ,reg2 --- ,reg3 --- ,reg4 --- ,reg102 --- }) - --- Mode_select starts at the next step after its "start" bit is set: - ---SPEC AG(!cg.start_cont_3eo_mode_select -> --- AX(cg.start_cont_3eo_mode_select & cs.step in {exit, undef} -> --- AX(cs.step = 1 & !cs.region_selected))) - --- During major mode 103, the inertial velocity is monitored. --- Below an I-loaded velocity, a MECO would constitute a contingency --- abort. 
(Must NOT be in SMODE=5 (??)) - -SPEC AG(cg.start_cont_3eo_mode_select & cs.m_mode = mm103 & - vel = LEQ_vi_3eo_min & meco_confirmed & !smode5 -> - A[!cs.region_selected U cs.region_selected & cs.cont_3EO_start]) - --- Above a certain inertial velocity (in mode 103), the 3E/O field --- is blanked, indicating that a MECO at this point would not require --- an OPS 6 contingency abort. - -SPEC AG(cs.region_selected -> - (cs.m_mode = mm103 & vel = GRT_vi_3eo_max -> !cs.cont_3EO_start)) - --- Between the two velocities, an apogee altitude - velocity curve is --- constructed based on the current inertial velocity. If the apogee --- altitude is above this curve, a contingency abort capability is --- still required and a 3E/O region index will be calculated. --- Otherwise, the 3E/O field is blanked out and no further contingency --- abort calculations will be performed. (Must NOT be in SMODE=5 (??)) - -SPEC AG(cg.start_cont_3eo_mode_select & cs.m_mode = mm103 & - vel = GRT_vi_3eo_min & meco_confirmed & !smode5 -> - A[!cs.region_selected U cs.region_selected & - apogee_alt_LT_alt_ref = !cs.cont_3EO_start]) - --- For an RTLS trajectory (SMODE=5), a check is made on the downrange --- velocity to see if the vehicle is heading away from the landing site. --- If this is the case, a 3E/O region index is calculated. If the vehicle --- is heading back to the landing site, and the current range to the MECO --- R-V line is greater than an I-loaded value, a 3E/O region index is --- calculated. Otherwise, an intact abort is possible and the 3E/O field --- is blanked. - -SPEC AG(cg.start_cont_3eo_mode_select & smode5 & meco_confirmed & - (!v_horiz_dnrng_LT_0 | !delta_r_GRT_del_r_usp) -> - A[!cs.region_selected U cs.region_selected & cs.cont_3EO_start]) - --- If this task is called prior to SRB separation [mm102], the 3E/O region --- index is set to 102 and the 3E/O contingency flag is set. - -SPEC AG(cs.m_mode = mm102 & cg.start_cont_3eo_mode_select -> - AX (A [ !cs.region_selected U cs.region_selected & - cs.r = reg102 & cs.cont_3EO_start])) - --- After SRB separation, on every pass that the 3E/O region index is --- calculated, a check is made to see if MECO confirmed has occurred. If --- so, a check is made to see if the major mode is 103. If so, an RTLS is --- automatically invoked to transition to major mode 601. - -SPEC AG(!cs.region_selected & cs.m_mode = mm103 & meco_confirmed -> - A[!cs.region_selected U cs.region_selected & cs.r != reg0 -> - cs.m_mode = mm601 & cs.RTLS_abort_declared]) - --- Once the 3E/O contingency flag has been set, this task is no longer --- executed. - -SPEC AG(cs.cont_3EO_start -> AG(!cg.start_cont_3eo_mode_select)) - --- If MECO confirmed occurs in MM103 and an OPS 6 contingency abort --- procedure is still required, contingency 3E/O guidance sets the --- CONT_3EO_START flag ON. Contingency 3E/O guidance then switches --- from its display support function into an actual auto guidance --- steering process. [...] Contingency 3E/O guidance sets the RTLS abort --- declared flag and the MSC performs the transition from major mode --- 103 to 601. - -SPEC AG(!cg.idle & !cg.finished & !cs.region_selected & cs.m_mode = mm103 -> - A[ !cg.finished U cg.finished & cs.region_selected & - (cs.cont_3EO_start -> cs.m_mode = mm601 & cs.RTLS_abort_declared) ]) - --- If MECO confirmed occurs in a major mode 601 and a contingency abort --- procedure is still required, contingency 3E/O guidance sets the --- CONT_3EO_START flag ON. [...]
Contingency 3E/O guidance then commands --- 3E/O auto maneuvers in major mode 601. [What are these maneuvers??] - -SPEC AG(cg.finished & cs.m_mode = mm601 & !et_sep_cmd & - meco_confirmed & cs.cont_3EO_start -> - cg.q_gcb_i in {quat_reg1, quat_reg2, quat_reg3, quat_reg4, undef} - | cg.emerg_sep) - --- If MECO confirmed occurs in a first stage (MM102) [...], contingency --- 3E/O guidance will command a fast ET separation during SRB tailoff in --- major mode 102. CONT 3E/O GUID will then command maneuver post-sep in --- MM601 (???). [ I'm not sure what indicates fast ET sep.: emerg_sep or --- early_sep, or what? ] - -SPEC AG(cg.finished & cs.m_mode = mm102 & meco_confirmed & pre_sep -> - cg.emerg_sep | et_sep_cmd - | cg.et_sep_man_initiate - | cg.early_sep - ) - ---------------------------------------------- --- Invariants from Murphi code -------------- ---------------------------------------------- - ---SPEC AG(cg.finished -> (output_ok != 0 | (output_ok = undef & --- (cg.emerg_sep | !cg.cont_sep_cplt)))) - ---SPEC AG(!cg.finished & !cg.idle -> !mated_coast_mnvr | !et_sep_cmd) - --- Stronger version !!! - -SPEC AG(cg.finished -> output_ok != 0) - --- Contingency Guidance shall command an ET separation --- [under certain conditions :-]. - -SPEC AG(cs.cont_3EO_start & cg.finished & - (cg.r = reg1 -> cond_29) & - (cg.r = reg2 -> cond_24 & cond_26) & - (cg.r = reg3 -> cg.alpha_ok & - (ABS_alf_err_LT_alf_sep_err -> cond_20b)) & - (cg.r = reg4 -> cond_18 & q_orb_LT_0) & - (cg.r = reg102 -> pre_sep) -> - et_sep_cmd | cg.et_sep_man_initiate - | cg.early_sep - | cg.emerg_sep - ) - --- Contingency Guidance shall command at most one interconnected OMS dump. - -SPEC AG(cg.finished & cg.oms_rcs_i_c_inh_ena_cmd -> - AG(!cg.oms_rcs_i_c_inh_ena_cmd -> AG(!cg.oms_rcs_i_c_inh_ena_cmd))) - --- Contingency Guidance shall command a transition to glide RTLS --- (flight mode 602) - -SPEC AG(cg.finished & cs.m_mode = mm601 -> - --cg.cont_sep_cplt | cg.emerg_sep | - cg.call_RTLS_abort_task) - --- Paper, p. 28, unstated assumption 2: at step 6 the region is --- among 102, 1-4. - -SPEC AG(cg.step = 6 -> cg.r in {reg102, reg1, reg2, reg3, reg4}) - --- The transition to mode 602 shall not occur until the entry maneuver --- has been calculated - -SPEC !E[cg.q_gcb_i = undef U cg.cont_sep_cplt & cg.q_gcb_i = undef] - --- The entry maneuver calculations shall not commence until the OMS/RCS --- interconnect, if any, is complete (??? What does it exactly mean???) --- !!! ---SPEC AG(cg.oms_rcs_i_c_inh_ena_cmd -> --- !E[cg.oms_rcs_i_c_inh_ena_cmd U --- cg.q_gcb_i != undef & cg.oms_rcs_i_c_inh_ena_cmd]) - -SPEC AG(cg.oms_rcs_i_c_inh_ena_cmd -> - !E[rcs_all_jet_inhibit U - cg.q_gcb_i != undef & rcs_all_jet_inhibit]) - --- The OMS dump shall not be considered until the -Z translation is complete. - -SPEC !E[!cont_minus_z_compl & cg.r != reg102 U cg.orbiter_dump_ena] - --- Completion of -Z translation shall not be checked until ET separation --- has been commanded - -SPEC !E[!et_sep_cmd U cg.step = 7] - --- ET separation shall be commanded if and only if an abort maneuver --- region is assigned [and again there are *certain conditions*]. 
- -SPEC AG(cg.finished & cs.cont_3EO_start & - (cg.r = reg1 -> cond_29) & - (cg.r = reg2 -> cond_24 & cond_26) & - (cg.r = reg3 -> cg.alpha_ok & - (ABS_alf_err_LT_alf_sep_err -> cond_20b)) & - (cg.r = reg4 -> cond_18 & q_orb_LT_0) & - (cg.r = reg102 -> pre_sep) -> - (cg.et_sep_man_initiate | et_sep_cmd - <-> cg.r in {reg1, reg2, reg3, reg4, reg102})) - --- The assigned region can not change arbitrarily. - --- Regions 1 and 2 may interchange, but will not switch to any other region: - -SPEC AG(cg.finished & cs.cont_3EO_start & cg.r in {reg1,reg2} -> - AG(cg.finished -> cg.r in {reg1,reg2})) - --- Regions 3 and 4 may interchange, but will not switch to any other region: - -SPEC AG(cg.finished & cs.cont_3EO_start & cg.r in {reg3,reg4} -> - AG(cg.finished -> cg.r in {reg3,reg4})) - --- Region 102 never changes: - -SPEC AG(cg.finished & cg.r = reg102 -> AG(cg.finished -> cg.r = reg102)) diff --git a/tests/examplefiles/hash_syntax.rb b/tests/examplefiles/hash_syntax.rb deleted file mode 100644 index 35b27723..00000000 --- a/tests/examplefiles/hash_syntax.rb +++ /dev/null @@ -1,5 +0,0 @@ -{ :old_syntax => 'ok' } -{ 'stings as key' => 'should be ok' } -{ new_syntax: 'broken until now' } -{ withoutunderscore: 'should be ok' } -{ _underscoreinfront: 'might be ok, if I understand the pygments code correct' } diff --git a/tests/examplefiles/hello-world.puzzlet.aheui b/tests/examplefiles/hello-world.puzzlet.aheui deleted file mode 100644 index e7ef3a62..00000000 --- a/tests/examplefiles/hello-world.puzzlet.aheui +++ /dev/null @@ -1,10 +0,0 @@ -밤밣따빠밣밟따뿌 -빠맣파빨받밤뚜뭏 -돋밬탕빠맣붏두붇 -볻뫃박발뚷투뭏붖 -뫃도뫃희멓뭏뭏붘 -뫃봌토범더벌뿌뚜 -뽑뽀멓멓더벓뻐뚠 -뽀덩벐멓뻐덕더벅 - -https://github.com/aheui/snippets/blob/master/hello-world/hello-world.puzzlet.aheui diff --git a/tests/examplefiles/hello.at b/tests/examplefiles/hello.at deleted file mode 100644 index 23af2f2d..00000000 --- a/tests/examplefiles/hello.at +++ /dev/null @@ -1,6 +0,0 @@ -def me := object: { - def name := "Kevin"; - def sayHello(peerName) { - system.println(peerName + " says hello!"); - }; -}; diff --git a/tests/examplefiles/hello.golo b/tests/examplefiles/hello.golo deleted file mode 100644 index 7e8ca214..00000000 --- a/tests/examplefiles/hello.golo +++ /dev/null @@ -1,5 +0,0 @@ -module hello.World - -function main = |args| { - println("Hello world!") -} diff --git a/tests/examplefiles/hello.lsl b/tests/examplefiles/hello.lsl deleted file mode 100644 index 61697e7f..00000000 --- a/tests/examplefiles/hello.lsl +++ /dev/null @@ -1,12 +0,0 @@ -default -{ - state_entry() - { - llSay(0, "Hello, Avatar!"); - } - - touch_start(integer total_number) - { - llSay(0, "Touched."); - } -} diff --git a/tests/examplefiles/hello.smali b/tests/examplefiles/hello.smali deleted file mode 100644 index e539f00e..00000000 --- a/tests/examplefiles/hello.smali +++ /dev/null @@ -1,40 +0,0 @@ -# To Recreate: -# -# echo -e 'class hello {\n public static void main(String[] args) {\n -# System.out.println("hi");\n }\n}\n' > hello.java -# javac -target 1.4 -source 1.4 hello.java -# dx --dex --output=hello.dex hello.class -# baksmali hello.dex -# cat out/hello.smali - -.class Lhello; -.super Ljava/lang/Object; -.source "hello.java" - - -# direct methods -.method constructor ()V - .registers 1 - - .prologue - .line 1 - invoke-direct {p0}, Ljava/lang/Object;->()V - - return-void -.end method - -.method public static main([Ljava/lang/String;)V - .registers 3 - .parameter - - .prologue - .line 3 - sget-object v0, Ljava/lang/System;->out:Ljava/io/PrintStream; - - const-string v1, "hi" - - invoke-virtual {v0, v1}, 
Ljava/io/PrintStream;->println(Ljava/lang/String;)V - - .line 4 - return-void -.end method diff --git a/tests/examplefiles/hello.sp b/tests/examplefiles/hello.sp deleted file mode 100644 index 7102d273..00000000 --- a/tests/examplefiles/hello.sp +++ /dev/null @@ -1,9 +0,0 @@ -#include - -// Single line comment -/* Multi line - comment */ - -public OnPluginStart() { - PrintToServer("Hello."); -} diff --git a/tests/examplefiles/hexdump_debugexe b/tests/examplefiles/hexdump_debugexe deleted file mode 100644 index 31fefdb7..00000000 --- a/tests/examplefiles/hexdump_debugexe +++ /dev/null @@ -1,309 +0,0 @@ -0000:0000 2F 2F 20 43 72 65 61 74-65 64 20 62 79 20 4C 69 // Created by Li -0000:0010 6F 6E 65 6C 6C 6F 20 4C-75 6E 65 73 75 20 61 6E onello Lunesu an -0000:0020 64 20 70 6C 61 63 65 64-20 69 6E 20 74 68 65 20 d placed in the -0000:0030 70 75 62 6C 69 63 20 64-6F 6D 61 69 6E 2E 0A 2F public domain.◙/ -0000:0040 2F 20 54 68 69 73 20 66-69 6C 65 20 68 61 73 20 / This file has -0000:0050 62 65 65 6E 20 6D 6F 64-69 66 69 65 64 20 66 72 been modified fr -0000:0060 6F 6D 20 69 74 73 20 6F-72 69 67 69 6E 61 6C 20 om its original -0000:0070 76 65 72 73 69 6F 6E 2E-0A 2F 2F 20 49 74 20 68 version.◙// It h -0000:0080 61 73 20 62 65 65 6E 20-66 6F 72 6D 61 74 74 65 as been formatte -0000:0090 64 20 74 6F 20 66 69 74-20 79 6F 75 72 20 73 63 d to fit your sc -0000:00A0 72 65 65 6E 2E 0A 6D 6F-64 75 6C 65 20 70 68 6F reen.◙module pho -0000:00B0 6E 65 6E 6F 3B 20 20 20-20 20 2F 2F 20 6F 70 74 neno; // opt -0000:00C0 69 6F 6E 61 6C 0A 69 6D-70 6F 72 74 20 73 74 64 ional◙import std -0000:00D0 2E 73 74 64 69 6F 3B 20-20 20 2F 2F 20 77 72 69 .stdio; // wri -0000:00E0 74 65 66 6C 6E 20 20 20-20 20 0A 69 6D 70 6F 72 tefln ◙impor -0000:00F0 74 20 73 74 64 2E 63 74-79 70 65 3B 20 20 20 2F t std.ctype; / -0000:0100 2F 20 69 73 64 69 67 69-74 20 20 20 20 20 0A 69 / isdigit ◙i -0000:0110 6D 70 6F 72 74 20 73 74-64 2E 73 74 72 65 61 6D mport std.stream -0000:0120 3B 20 20 2F 2F 20 42 75-66 66 65 72 65 64 46 69 ; // BufferedFi -0000:0130 6C 65 0A 0A 2F 2F 20 4A-75 73 74 20 66 6F 72 20 le◙◙// Just for -0000:0140 72 65 61 64 61 62 69 6C-69 74 79 20 28 69 6D 61 readability (ima -0000:0150 67 69 6E 65 20 63 68 61-72 5B 5D 5B 5D 5B 63 68 gine char[][][ch -0000:0160 61 72 5B 5D 5D 29 20 20-20 20 0A 61 6C 69 61 73 ar[]]) ◙alias -0000:0170 20 63 68 61 72 5B 5D 20-73 74 72 69 6E 67 3B 0A char[] string;◙ -0000:0180 61 6C 69 61 73 20 73 74-72 69 6E 67 5B 5D 20 73 alias string[] s -0000:0190 74 72 69 6E 67 61 72 72-61 79 3B 0A 0A 2F 2F 2F tringarray;◙◙/// -0000:01A0 20 53 74 72 69 70 73 20-6E 6F 6E 2D 64 69 67 69 Strips non-digi -0000:01B0 74 20 63 68 61 72 61 63-74 65 72 73 20 66 72 6F t characters fro -0000:01C0 6D 20 74 68 65 20 73 74-72 69 6E 67 20 28 43 4F m the string (CO -0000:01D0 57 29 0A 73 74 72 69 6E-67 20 73 74 72 69 70 4E W)◙string stripN -0000:01E0 6F 6E 44 69 67 69 74 28-20 69 6E 20 73 74 72 69 onDigit( in stri -0000:01F0 6E 67 20 6C 69 6E 65 20-29 20 0A 7B 0A 20 20 20 ng line ) ◙{◙ -0000:0200 20 73 74 72 69 6E 67 20-72 65 74 3B 0A 20 20 20 string ret;◙ -0000:0210 20 66 6F 72 65 61 63 68-28 75 69 6E 74 20 69 2C foreach(uint i, -0000:0220 20 63 3B 20 6C 69 6E 65-29 20 7B 0A 20 20 20 20 c; line) {◙ -0000:0230 20 20 20 20 2F 2F 20 45-72 72 6F 72 3A 20 73 74 // Error: st -0000:0240 64 2E 63 74 79 70 65 2E-69 73 64 69 67 69 74 20 d.ctype.isdigit -0000:0250 61 74 20 43 3A 5C 64 6D-64 5C 73 72 63 5C 70 68 at C:\dmd\src\ph -0000:0260 6F 62 6F 73 5C 73 74 64-5C 63 74 79 70 65 2E 64 obos\std\ctype.d -0000:0270 28 33 37 29 20 
0A 20 20-20 20 20 20 20 20 2F 2F (37) ◙ // -0000:0280 20 63 6F 6E 66 6C 69 63-74 73 20 77 69 74 68 20 conflicts with -0000:0290 73 74 64 2E 73 74 72 65-61 6D 2E 69 73 64 69 67 std.stream.isdig -0000:02A0 69 74 20 61 74 20 43 3A-5C 64 6D 64 5C 73 72 63 it at C:\dmd\src -0000:02B0 5C 70 68 6F 62 6F 73 5C-73 74 64 5C 73 74 72 65 \phobos\std\stre -0000:02C0 61 6D 2E 64 28 32 39 32-34 29 0A 20 20 20 20 20 am.d(2924)◙ -0000:02D0 20 20 20 69 66 20 28 21-73 74 64 2E 63 74 79 70 if (!std.ctyp -0000:02E0 65 2E 69 73 64 69 67 69-74 28 63 29 29 20 7B 0A e.isdigit(c)) {◙ -0000:02F0 20 20 20 20 20 20 20 20-20 20 20 20 69 66 20 28 if ( -0000:0300 21 72 65 74 29 0A 20 20-20 20 20 20 20 20 20 20 !ret)◙ -0000:0310 20 20 20 20 20 20 72 65-74 20 3D 20 6C 69 6E 65 ret = line -0000:0320 5B 30 2E 2E 69 5D 3B 20-20 20 20 0A 20 20 20 20 [0..i]; ◙ -0000:0330 20 20 20 20 7D 20 20 20-20 0A 20 20 20 20 20 20 } ◙ -0000:0340 20 20 65 6C 73 65 20 69-66 20 28 72 65 74 29 0A else if (ret)◙ -0000:0350 20 20 20 20 20 20 20 20-20 20 20 20 72 65 74 20 ret -0000:0360 7E 3D 20 63 3B 20 20 20-20 0A 20 20 20 20 7D 20 ~= c; ◙ } -0000:0370 20 20 20 0A 20 20 20 20-72 65 74 75 72 6E 20 72 ◙ return r -0000:0380 65 74 3F 72 65 74 3A 6C-69 6E 65 3B 0A 7D 0A 0A et?ret:line;◙}◙◙ -0000:0390 75 6E 69 74 74 65 73 74-20 7B 0A 20 20 20 20 61 unittest {◙ a -0000:03A0 73 73 65 72 74 28 20 73-74 72 69 70 4E 6F 6E 44 ssert( stripNonD -0000:03B0 69 67 69 74 28 22 61 73-64 66 22 29 20 3D 3D 20 igit("asdf") == -0000:03C0 22 22 20 20 29 3B 0A 20-20 20 20 61 73 73 65 72 "" );◙ asser -0000:03D0 74 28 20 73 74 72 69 70-4E 6F 6E 44 69 67 69 74 t( stripNonDigit -0000:03E0 28 22 5C 27 31 33 2D 3D-32 20 34 6B 6F 70 22 29 ("\'13-=2 4kop") -0000:03F0 20 3D 3D 20 20 22 31 33-32 34 22 20 20 29 3B 0A == "1324" );◙ -0000:0400 7D 0A 0A 2F 2F 2F 20 43-6F 6E 76 65 72 74 73 20 }◙◙/// Converts -0000:0410 61 20 77 6F 72 64 20 69-6E 74 6F 20 61 20 6E 75 a word into a nu -0000:0420 6D 62 65 72 2C 20 69 67-6E 6F 72 69 6E 67 20 61 mber, ignoring a -0000:0430 6C 6C 20 6E 6F 6E 20 61-6C 70 68 61 20 63 68 61 ll non alpha cha -0000:0440 72 61 63 74 65 72 73 20-20 0A 73 74 72 69 6E 67 racters ◙string -0000:0450 20 77 6F 72 64 54 6F 4E-75 6D 28 20 69 6E 20 73 wordToNum( in s -0000:0460 74 72 69 6E 67 20 77 6F-72 64 20 29 0A 7B 0A 2F tring word )◙{◙/ -0000:0470 2F 20 74 72 61 6E 73 6C-61 74 69 6F 6E 20 74 61 / translation ta -0000:0480 62 6C 65 20 66 6F 72 20-74 68 65 20 74 61 73 6B ble for the task -0000:0490 20 61 74 20 68 61 6E 64-0A 63 6F 6E 73 74 20 63 at hand◙const c -0000:04A0 68 61 72 5B 32 35 36 5D-20 54 52 41 4E 53 4C 41 har[256] TRANSLA -0000:04B0 54 45 20 3D 20 20 20 20-0A 20 20 20 20 22 20 20 TE = ◙ " -0000:04C0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:04D0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 22 20 " -0000:04E0 20 2F 2F 20 30 20 20 20-0A 20 20 20 20 22 20 20 // 0 ◙ " -0000:04F0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 30 31 01 -0000:0500 32 33 34 35 36 37 38 39-20 20 20 20 20 20 22 20 23456789 " -0000:0510 20 2F 2F 20 33 32 20 20-20 20 20 0A 20 20 20 20 // 32 ◙ -0000:0520 22 20 35 37 36 33 30 34-39 39 36 31 37 38 35 31 " 57630499617851 -0000:0530 38 38 31 32 33 34 37 36-32 32 33 39 20 20 20 20 881234762239 -0000:0540 20 22 20 20 2F 2F 20 36-34 20 20 20 0A 20 20 20 " // 64 ◙ -0000:0550 20 22 20 35 37 36 33 30-34 39 39 36 31 37 38 35 " 5763049961785 -0000:0560 31 38 38 31 32 33 34 37-36 32 32 33 39 20 20 20 1881234762239 -0000:0570 20 20 22 0A 20 20 20 20-22 20 20 20 20 20 20 20 "◙ " -0000:0580 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 
-0000:0590 20 20 20 20 20 20 20 20-20 22 0A 20 20 20 20 22 "◙ " -0000:05A0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:05B0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:05C0 22 0A 20 20 20 20 22 20-20 20 20 20 20 20 20 20 "◙ " -0000:05D0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:05E0 20 20 20 20 20 20 20 22-20 20 20 20 0A 20 20 20 " ◙ -0000:05F0 20 22 20 20 20 20 20 20-20 20 20 20 20 20 20 20 " -0000:0600 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:0610 20 20 22 3B 0A 20 20 20-20 73 74 72 69 6E 67 20 ";◙ string -0000:0620 72 65 74 3B 0A 20 20 20-20 66 6F 72 65 61 63 68 ret;◙ foreach -0000:0630 28 63 3B 20 63 61 73 74-28 75 62 79 74 65 5B 5D (c; cast(ubyte[] -0000:0640 29 77 6F 72 64 29 0A 20-20 20 20 20 20 20 20 69 )word)◙ i -0000:0650 66 20 28 54 52 41 4E 53-4C 41 54 45 5B 63 5D 20 f (TRANSLATE[c] -0000:0660 21 3D 20 27 20 27 29 0A-20 20 20 20 20 20 20 20 != ' ')◙ -0000:0670 20 20 20 20 72 65 74 20-7E 3D 20 54 52 41 4E 53 ret ~= TRANS -0000:0680 4C 41 54 45 5B 63 5D 3B-0A 20 20 20 20 72 65 74 LATE[c];◙ ret -0000:0690 75 72 6E 20 72 65 74 3B-0A 7D 0A 0A 75 6E 69 74 urn ret;◙}◙◙unit -0000:06A0 74 65 73 74 20 7B 0A 20-2F 2F 20 54 65 73 74 20 test {◙ // Test -0000:06B0 77 6F 72 64 54 6F 4E 75-6D 20 75 73 69 6E 67 20 wordToNum using -0000:06C0 74 68 65 20 74 61 62 6C-65 20 66 72 6F 6D 20 74 the table from t -0000:06D0 68 65 20 74 61 73 6B 20-64 65 73 63 72 69 70 74 he task descript -0000:06E0 69 6F 6E 2E 0A 20 61 73-73 65 72 74 28 20 22 30 ion.◙ assert( "0 -0000:06F0 31 31 31 32 32 32 33 33-33 34 34 35 35 36 36 36 1112223334455666 -0000:0700 37 37 37 38 38 38 39 39-39 22 20 3D 3D 0A 20 20 777888999" ==◙ -0000:0710 20 77 6F 72 64 54 6F 4E-75 6D 28 22 45 20 7C 20 wordToNum("E | -0000:0720 4A 20 4E 20 51 20 7C 20-52 20 57 20 58 20 7C 20 J N Q | R W X | -0000:0730 44 20 53 20 59 20 7C 20-46 20 54 20 7C 20 41 20 D S Y | F T | A -0000:0740 4D 20 7C 20 43 20 49 20-56 20 7C 20 42 20 4B 20 M | C I V | B K -0000:0750 55 20 7C 20 4C 20 4F 20-50 20 7C 20 47 20 48 20 U | L O P | G H -0000:0760 5A 22 29 29 3B 0A 20 61-73 73 65 72 74 28 20 22 Z"));◙ assert( " -0000:0770 30 31 31 31 32 32 32 33-33 33 34 34 35 35 36 36 0111222333445566 -0000:0780 36 37 37 37 38 38 38 39-39 39 22 20 3D 3D 20 0A 6777888999" == ◙ -0000:0790 20 20 20 77 6F 72 64 54-6F 4E 75 6D 28 22 65 20 wordToNum("e -0000:07A0 7C 20 6A 20 6E 20 71 20-7C 20 72 20 77 20 78 20 | j n q | r w x -0000:07B0 7C 20 64 20 73 20 79 20-7C 20 66 20 74 20 7C 20 | d s y | f t | -0000:07C0 61 20 6D 20 7C 20 63 20-69 20 76 20 7C 20 62 20 a m | c i v | b -0000:07D0 6B 20 75 20 7C 20 6C 20-6F 20 70 20 7C 20 67 20 k u | l o p | g -0000:07E0 68 20 7A 22 29 29 3B 0A-20 61 73 73 65 72 74 28 h z"));◙ assert( -0000:07F0 20 22 30 31 32 33 34 35-36 37 38 39 22 20 3D 3D "0123456789" == -0000:0800 20 0A 20 20 20 77 6F 72-64 54 6F 4E 75 6D 28 22 ◙ wordToNum(" -0000:0810 30 20 7C 20 20 20 31 20-20 20 7C 20 20 20 32 20 0 | 1 | 2 -0000:0820 20 20 7C 20 20 20 33 20-20 20 7C 20 20 34 20 20 | 3 | 4 -0000:0830 7C 20 20 35 20 20 7C 20-20 20 36 20 20 20 7C 20 | 5 | 6 | -0000:0840 20 20 37 20 20 20 7C 20-20 20 38 20 20 20 7C 20 7 | 8 | -0000:0850 20 20 39 22 29 29 3B 0A-7D 0A 0A 76 6F 69 64 20 9"));◙}◙◙void -0000:0860 6D 61 69 6E 28 20 73 74-72 69 6E 67 5B 5D 20 61 main( string[] a -0000:0870 72 67 73 20 29 0A 7B 0A-20 20 20 20 2F 2F 20 54 rgs )◙{◙ // T -0000:0880 68 69 73 20 61 73 73 6F-63 69 61 74 69 76 65 20 his associative -0000:0890 61 72 72 61 79 20 6D 61-70 73 20 61 20 6E 75 6D array maps a num -0000:08A0 62 65 72 20 74 6F 20 61-6E 20 61 
72 72 61 79 20 ber to an array -0000:08B0 6F 66 20 77 6F 72 64 73-2E 20 20 20 20 0A 20 20 of words. ◙ -0000:08C0 20 20 73 74 72 69 6E 67-61 72 72 61 79 5B 73 74 stringarray[st -0000:08D0 72 69 6E 67 5D 20 20 20-20 6E 75 6D 32 77 6F 72 ring] num2wor -0000:08E0 64 73 3B 0A 0A 20 20 20-20 66 6F 72 65 61 63 68 ds;◙◙ foreach -0000:08F0 28 73 74 72 69 6E 67 20-77 6F 72 64 3B 20 6E 65 (string word; ne -0000:0900 77 20 42 75 66 66 65 72-65 64 46 69 6C 65 28 22 w BufferedFile(" -0000:0910 64 69 63 74 69 6F 6E 61-72 79 2E 74 78 74 22 20 dictionary.txt" -0000:0920 29 20 29 0A 20 20 20 20-20 20 20 20 6E 75 6D 32 ) )◙ num2 -0000:0930 77 6F 72 64 73 5B 20 77-6F 72 64 54 6F 4E 75 6D words[ wordToNum -0000:0940 28 77 6F 72 64 29 20 5D-20 7E 3D 20 77 6F 72 64 (word) ] ~= word -0000:0950 2E 64 75 70 3B 20 20 20-20 20 20 20 20 2F 2F 20 .dup; // -0000:0960 6D 75 73 74 20 64 75 70-0A 0A 20 20 20 20 2F 2F must dup◙◙ // -0000:0970 2F 20 46 69 6E 64 73 20-61 6C 6C 20 61 6C 74 65 / Finds all alte -0000:0980 72 6E 61 74 69 76 65 73-20 66 6F 72 20 74 68 65 rnatives for the -0000:0990 20 67 69 76 65 6E 20 6E-75 6D 62 65 72 0A 20 20 given number◙ -0000:09A0 20 20 2F 2F 2F 20 28 73-68 6F 75 6C 64 20 68 61 /// (should ha -0000:09B0 76 65 20 62 65 65 6E 20-73 74 72 69 70 70 65 64 ve been stripped -0000:09C0 20 66 72 6F 6D 20 6E 6F-6E 2D 64 69 67 69 74 20 from non-digit -0000:09D0 63 68 61 72 61 63 74 65-72 73 29 0A 20 20 20 20 characters)◙ -0000:09E0 73 74 72 69 6E 67 61 72-72 61 79 20 5F 46 69 6E stringarray _Fin -0000:09F0 64 57 6F 72 64 73 28 20-73 74 72 69 6E 67 20 6E dWords( string n -0000:0A00 75 6D 62 65 72 73 2C 20-62 6F 6F 6C 20 64 69 67 umbers, bool dig -0000:0A10 69 74 6F 6B 20 29 0A 20-20 20 20 69 6E 20 7B 0A itok )◙ in {◙ -0000:0A20 20 20 20 20 20 20 20 20-61 73 73 65 72 74 28 6E assert(n -0000:0A30 75 6D 62 65 72 73 2E 6C-65 6E 67 74 68 20 3E 20 umbers.length > -0000:0A40 20 30 29 3B 20 20 20 20-0A 20 20 20 20 7D 20 20 0); ◙ } -0000:0A50 20 20 0A 20 20 20 20 6F-75 74 28 72 65 73 75 6C ◙ out(resul -0000:0A60 74 29 20 7B 0A 20 20 20-20 20 20 20 20 66 6F 72 t) {◙ for -0000:0A70 65 61 63 68 20 28 61 3B-20 72 65 73 75 6C 74 29 each (a; result) -0000:0A80 0A 20 20 20 20 20 20 20-20 20 20 20 20 61 73 73 ◙ ass -0000:0A90 65 72 74 28 20 77 6F 72-64 54 6F 4E 75 6D 28 61 ert( wordToNum(a -0000:0AA0 29 20 3D 3D 20 6E 75 6D-62 65 72 73 20 29 3B 0A ) == numbers );◙ -0000:0AB0 20 20 20 20 7D 20 20 20-20 0A 20 20 20 20 62 6F } ◙ bo -0000:0AC0 64 79 20 7B 0A 20 20 20-20 20 20 20 20 73 74 72 dy {◙ str -0000:0AD0 69 6E 67 61 72 72 61 79-20 72 65 74 3B 0A 20 20 ingarray ret;◙ -0000:0AE0 20 20 20 20 20 20 62 6F-6F 6C 20 66 6F 75 6E 64 bool found -0000:0AF0 77 6F 72 64 20 3D 20 66-61 6C 73 65 3B 0A 20 20 word = false;◙ -0000:0B00 20 20 20 20 20 20 66 6F-72 20 28 75 69 6E 74 20 for (uint -0000:0B10 74 3D 31 3B 20 74 3C 3D-6E 75 6D 62 65 72 73 2E t=1; t<=numbers. 
-0000:0B20 6C 65 6E 67 74 68 3B 20-2B 2B 74 29 20 7B 0A 20 length; ++t) {◙ -0000:0B30 20 20 20 20 20 20 20 20-20 20 20 61 75 74 6F 20 auto -0000:0B40 61 6C 74 65 72 6E 61 74-69 76 65 73 20 3D 20 6E alternatives = n -0000:0B50 75 6D 62 65 72 73 5B 30-2E 2E 74 5D 20 69 6E 20 umbers[0..t] in -0000:0B60 6E 75 6D 32 77 6F 72 64-73 3B 0A 20 20 20 20 20 num2words;◙ -0000:0B70 20 20 20 20 20 20 20 69-66 20 28 21 61 6C 74 65 if (!alte -0000:0B80 72 6E 61 74 69 76 65 73-29 0A 20 20 20 20 20 20 rnatives)◙ -0000:0B90 20 20 20 20 20 20 20 20-20 20 63 6F 6E 74 69 6E contin -0000:0BA0 75 65 3B 0A 20 20 20 20-20 20 20 20 20 20 20 20 ue;◙ -0000:0BB0 66 6F 75 6E 64 77 6F 72-64 20 3D 20 74 72 75 65 foundword = true -0000:0BC0 3B 0A 20 20 20 20 20 20-20 20 20 20 20 20 69 66 ;◙ if -0000:0BD0 20 28 6E 75 6D 62 65 72-73 2E 6C 65 6E 67 74 68 (numbers.length -0000:0BE0 20 3E 20 20 74 29 20 7B-0A 20 20 20 20 20 20 20 > t) {◙ -0000:0BF0 20 20 20 20 20 20 20 20-20 2F 2F 20 43 6F 6D 62 // Comb -0000:0C00 69 6E 65 20 61 6C 6C 20-63 75 72 72 65 6E 74 20 ine all current -0000:0C10 61 6C 74 65 72 6E 61 74-69 76 65 73 20 77 69 74 alternatives wit -0000:0C20 68 20 61 6C 6C 20 61 6C-74 65 72 6E 61 74 69 76 h all alternativ -0000:0C30 65 73 20 20 20 20 20 0A-20 20 20 20 20 20 20 20 es ◙ -0000:0C40 20 20 20 20 20 20 20 20-2F 2F 20 6F 66 20 74 68 // of th -0000:0C50 65 20 72 65 73 74 20 28-6E 65 78 74 20 70 69 65 e rest (next pie -0000:0C60 63 65 20 63 61 6E 20 73-74 61 72 74 20 77 69 74 ce can start wit -0000:0C70 68 20 61 20 64 69 67 69-74 29 20 20 20 20 20 20 h a digit) -0000:0C80 20 20 20 20 20 20 20 20-0A 20 20 20 20 20 20 20 ◙ -0000:0C90 20 20 20 20 20 20 20 20-20 66 6F 72 65 61 63 68 foreach -0000:0CA0 20 28 61 32 3B 20 5F 46-69 6E 64 57 6F 72 64 73 (a2; _FindWords -0000:0CB0 28 20 6E 75 6D 62 65 72-73 5B 74 2E 2E 24 5D 2C ( numbers[t..$], -0000:0CC0 20 74 72 75 65 20 20 20-20 20 29 20 29 0A 20 20 true ) )◙ -0000:0CD0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:0CE0 20 20 66 6F 72 65 61 63-68 28 61 31 3B 20 2A 61 foreach(a1; *a -0000:0CF0 6C 74 65 72 6E 61 74 69-76 65 73 29 0A 20 20 20 lternatives)◙ -0000:0D00 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:0D10 20 20 20 20 72 65 74 20-7E 3D 20 61 31 20 7E 20 ret ~= a1 ~ -0000:0D20 22 20 22 20 7E 20 61 32-3B 0A 20 20 20 20 20 20 " " ~ a2;◙ -0000:0D30 20 20 20 20 20 20 7D 0A-20 20 20 20 20 20 20 20 }◙ -0000:0D40 20 20 20 20 65 6C 73 65-20 20 20 20 0A 20 20 20 else ◙ -0000:0D50 20 20 20 20 20 20 20 20-20 20 20 20 20 72 65 74 ret -0000:0D60 20 7E 3D 20 2A 61 6C 74-65 72 6E 61 74 69 76 65 ~= *alternative -0000:0D70 73 3B 20 20 20 20 2F 2F-20 61 70 70 65 6E 64 20 s; // append -0000:0D80 74 68 65 73 65 20 61 6C-74 65 72 6E 61 74 69 76 these alternativ -0000:0D90 65 73 0A 20 20 20 20 20-20 20 20 7D 0A 20 20 20 es◙ }◙ -0000:0DA0 20 20 20 20 20 2F 2F 20-54 72 79 20 74 6F 20 6B // Try to k -0000:0DB0 65 65 70 20 31 20 64 69-67 69 74 2C 20 6F 6E 6C eep 1 digit, onl -0000:0DC0 79 20 69 66 20 77 65 27-72 65 20 61 6C 6C 6F 77 y if we're allow -0000:0DD0 65 64 20 61 6E 64 20 6E-6F 20 6F 74 68 65 72 0A ed and no other◙ -0000:0DE0 20 20 20 20 20 20 20 20-2F 2F 20 61 6C 74 65 72 // alter -0000:0DF0 6E 61 74 69 76 65 73 20-77 65 72 65 20 66 6F 75 natives were fou -0000:0E00 6E 64 0A 20 20 20 20 20-20 20 20 2F 2F 20 54 65 nd◙ // Te -0000:0E10 73 74 69 6E 67 20 22 72-65 74 2E 6C 65 6E 67 74 sting "ret.lengt -0000:0E20 68 22 20 6D 61 6B 65 73-20 6D 6F 72 65 20 73 65 h" makes more se -0000:0E30 6E 73 65 20 74 68 61 6E-20 74 65 73 74 69 6E 67 nse than testing -0000:0E40 20 22 66 
6F 75 6E 64 77-6F 72 64 22 2C 0A 20 20 "foundword",◙ -0000:0E50 20 20 20 20 20 20 2F 2F-20 62 75 74 20 74 68 65 // but the -0000:0E60 20 6F 74 68 65 72 20 69-6D 70 6C 65 6D 65 6E 74 other implement -0000:0E70 61 74 69 6F 6E 73 20 73-65 65 6D 20 74 6F 20 64 ations seem to d -0000:0E80 6F 20 6A 75 73 74 20 74-68 69 73 2E 0A 20 20 20 o just this.◙ -0000:0E90 20 20 20 20 20 69 66 20-28 64 69 67 69 74 6F 6B if (digitok -0000:0EA0 20 26 26 20 21 66 6F 75-6E 64 77 6F 72 64 29 20 && !foundword) -0000:0EB0 7B 20 2F 2F 72 65 74 2E-6C 65 6E 67 74 68 20 3D { //ret.length = -0000:0EC0 3D 20 30 20 20 0A 20 20-20 20 20 20 20 20 20 20 = 0 ◙ -0000:0ED0 20 20 69 66 28 6E 75 6D-62 65 72 73 2E 6C 65 6E if(numbers.len -0000:0EE0 67 74 68 20 3E 20 20 31-29 20 7B 0A 20 20 20 20 gth > 1) {◙ -0000:0EF0 20 20 20 20 20 20 20 20-20 20 20 20 2F 2F 20 43 // C -0000:0F00 6F 6D 62 69 6E 65 20 31-20 64 69 67 69 74 20 77 ombine 1 digit w -0000:0F10 69 74 68 20 61 6C 6C 20-61 6C 74 65 6E 61 74 69 ith all altenati -0000:0F20 76 65 73 20 66 72 6F 6D-20 74 68 65 20 72 65 73 ves from the res -0000:0F30 74 20 20 20 20 0A 20 20-20 20 20 20 20 20 20 20 t ◙ -0000:0F40 20 20 20 20 20 20 2F 2F-20 28 6E 65 78 74 20 70 // (next p -0000:0F50 69 65 63 65 20 63 61 6E-20 6E 6F 74 20 73 74 61 iece can not sta -0000:0F60 72 74 20 77 69 74 68 20-61 20 64 69 67 69 74 29 rt with a digit) -0000:0F70 20 20 20 20 20 20 20 20-20 20 0A 20 20 20 20 20 ◙ -0000:0F80 20 20 20 20 20 20 20 20-20 20 20 66 6F 72 65 61 forea -0000:0F90 63 68 20 28 61 3B 20 5F-46 69 6E 64 57 6F 72 64 ch (a; _FindWord -0000:0FA0 73 28 20 6E 75 6D 62 65-72 73 5B 31 2E 2E 24 5D s( numbers[1..$] -0000:0FB0 2C 20 66 61 6C 73 65 20-29 20 29 0A 20 20 20 20 , false ) )◙ -0000:0FC0 20 20 20 20 20 20 20 20-20 20 20 20 20 20 20 20 -0000:0FD0 72 65 74 20 7E 3D 20 6E-75 6D 62 65 72 73 5B 30 ret ~= numbers[0 -0000:0FE0 2E 2E 31 5D 20 7E 20 22-20 22 20 7E 20 61 3B 0A ..1] ~ " " ~ a;◙ -0000:0FF0 20 20 20 20 20 20 20 20-20 20 20 20 7D 20 20 20 } -0000:1000 20 0A 20 20 20 20 20 20-20 20 20 20 20 20 65 6C ◙ el -0000:1010 73 65 20 20 20 20 0A 20-20 20 20 20 20 20 20 20 se ◙ -0000:1020 20 20 20 20 20 20 20 72-65 74 20 7E 3D 20 6E 75 ret ~= nu -0000:1030 6D 62 65 72 73 5B 30 2E-2E 31 5D 3B 20 20 20 20 mbers[0..1]; -0000:1040 2F 2F 20 6A 75 73 74 20-61 70 70 65 6E 64 20 74 // just append t -0000:1050 68 69 73 20 64 69 67 69-74 20 20 20 20 20 20 20 his digit -0000:1060 20 20 20 20 20 20 0A 20-20 20 20 20 20 20 20 7D ◙ } -0000:1070 20 20 20 20 0A 20 20 20-20 20 20 20 20 72 65 74 ◙ ret -0000:1080 75 72 6E 20 72 65 74 3B-0A 20 20 20 20 7D 0A 0A urn ret;◙ }◙◙ -0000:1090 20 20 20 20 2F 2F 2F 20-28 54 68 69 73 20 66 75 /// (This fu -0000:10A0 6E 63 74 69 6F 6E 20 77-61 73 20 69 6E 6C 69 6E nction was inlin -0000:10B0 65 64 20 69 6E 20 74 68-65 20 6F 72 69 67 69 6E ed in the origin -0000:10C0 61 6C 20 70 72 6F 67 72-61 6D 29 20 0A 20 20 20 al program) ◙ -0000:10D0 20 2F 2F 2F 20 46 69 6E-64 73 20 61 6C 6C 20 61 /// Finds all a -0000:10E0 6C 74 65 72 6E 61 74 69-76 65 73 20 66 6F 72 20 lternatives for -0000:10F0 74 68 65 20 67 69 76 65-6E 20 70 68 6F 6E 65 20 the given phone -0000:1100 6E 75 6D 62 65 72 20 0A-20 20 20 20 2F 2F 2F 20 number ◙ /// -0000:1110 52 65 74 75 72 6E 73 3A-20 61 72 72 61 79 20 6F Returns: array o -0000:1120 66 20 73 74 72 69 6E 67-73 20 0A 20 20 20 20 73 f strings ◙ s -0000:1130 74 72 69 6E 67 61 72 72-61 79 20 46 69 6E 64 57 tringarray FindW -0000:1140 6F 72 64 73 28 20 73 74-72 69 6E 67 20 70 68 6F ords( string pho -0000:1150 6E 65 5F 6E 75 6D 62 65-72 20 29 0A 20 20 20 20 
ne_number )◙ -0000:1160 7B 0A 20 20 20 20 20 20-20 20 69 66 20 28 21 70 {◙ if (!p -0000:1170 68 6F 6E 65 5F 6E 75 6D-62 65 72 2E 6C 65 6E 67 hone_number.leng -0000:1180 74 68 29 0A 20 20 20 20-20 20 20 20 20 20 20 20 th)◙ -0000:1190 72 65 74 75 72 6E 20 6E-75 6C 6C 3B 0A 20 20 20 return null;◙ -0000:11A0 20 20 20 20 20 2F 2F 20-53 74 72 69 70 20 74 68 // Strip th -0000:11B0 65 20 6E 6F 6E 2D 64 69-67 69 74 20 63 68 61 72 e non-digit char -0000:11C0 61 63 74 65 72 73 20 66-72 6F 6D 20 74 68 65 20 acters from the -0000:11D0 70 68 6F 6E 65 20 6E 75-6D 62 65 72 2C 20 61 6E phone number, an -0000:11E0 64 0A 20 20 20 20 20 20-20 20 2F 2F 20 70 61 73 d◙ // pas -0000:11F0 73 20 69 74 20 74 6F 20-74 68 65 20 72 65 63 75 s it to the recu -0000:1200 72 73 69 76 65 20 66 75-6E 63 74 69 6F 6E 20 28 rsive function ( -0000:1210 6C 65 61 64 69 6E 67 20-64 69 67 69 74 20 69 73 leading digit is -0000:1220 20 61 6C 6C 6F 77 65 64-29 0A 20 20 20 20 20 20 allowed)◙ -0000:1230 20 20 72 65 74 75 72 6E-20 5F 46 69 6E 64 57 6F return _FindWo -0000:1240 72 64 73 28 20 73 74 72-69 70 4E 6F 6E 44 69 67 rds( stripNonDig -0000:1250 69 74 28 70 68 6F 6E 65-5F 6E 75 6D 62 65 72 29 it(phone_number) -0000:1260 2C 20 74 72 75 65 20 29-3B 20 20 20 20 0A 20 20 , true ); ◙ -0000:1270 20 20 7D 20 20 20 20 0A-20 20 20 20 0A 20 20 20 } ◙ ◙ -0000:1280 20 2F 2F 20 52 65 61 64-20 74 68 65 20 70 68 6F // Read the pho -0000:1290 6E 65 20 6E 75 6D 62 65-72 73 20 20 20 20 20 0A ne numbers ◙ -0000:12A0 20 20 20 20 66 6F 72 65-61 63 68 28 73 74 72 69 foreach(stri -0000:12B0 6E 67 20 70 68 6F 6E 65-3B 20 6E 65 77 20 42 75 ng phone; new Bu -0000:12C0 66 66 65 72 65 64 46 69-6C 65 28 22 69 6E 70 75 fferedFile("inpu -0000:12D0 74 2E 74 78 74 22 20 20-20 29 20 29 0A 20 20 20 t.txt" ) )◙ -0000:12E0 20 20 20 20 20 66 6F 72-65 61 63 68 28 61 6C 74 foreach(alt -0000:12F0 65 72 6E 61 74 69 76 65-3B 20 46 69 6E 64 57 6F ernative; FindWo -0000:1300 72 64 73 28 20 70 68 6F-6E 65 20 29 20 29 0A 20 rds( phone ) )◙ -0000:1310 20 20 20 20 20 20 20 20-20 20 20 77 72 69 74 65 write -0000:1320 66 6C 6E 28 70 68 6F 6E-65 2C 20 22 3A 20 22 2C fln(phone, ": ", -0000:1330 20 61 6C 74 65 72 6E 61-74 69 76 65 20 29 3B 0A alternative );◙ -0000:1340 7D 0A 0A }◙◙ diff --git a/tests/examplefiles/hexdump_hd b/tests/examplefiles/hexdump_hd deleted file mode 100644 index 4af46fcb..00000000 --- a/tests/examplefiles/hexdump_hd +++ /dev/null @@ -1,310 +0,0 @@ -00000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 |// Created by Li| -00000010 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e |onello Lunesu an| -00000020 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 |d placed in the | -00000030 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f |public domain../| -00000040 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 |/ This file has | -00000050 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 |been modified fr| -00000060 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 |om its original | -00000070 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 |version..// It h| -00000080 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 |as been formatte| -00000090 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 |d to fit your sc| -000000a0 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f |reen..module pho| -000000b0 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 |neno; // opt| -000000c0 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 |ional.import std| -000000d0 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 |.stdio; // wri| -000000e0 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 |tefln 
.impor| -000000f0 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f |t std.ctype; /| -00000100 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 |/ isdigit .i| -00000110 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d |mport std.stream| -00000120 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 |; // BufferedFi| -00000130 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 |le..// Just for | -00000140 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 |readability (ima| -00000150 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 |gine char[][][ch| -00000160 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 |ar[]]) .alias| -00000170 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a | char[] string;.| -00000180 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 |alias string[] s| -00000190 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f |tringarray;..///| -000001a0 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 | Strips non-digi| -000001b0 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f |t characters fro| -000001c0 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f |m the string (CO| -000001d0 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e |W).string stripN| -000001e0 6f 6e 44 69 67 69 74 28 20 69 6e 20 73 74 72 69 |onDigit( in stri| -000001f0 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 |ng line ) .{. | -00000200 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 | string ret;. | -00000210 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c | foreach(uint i,| -00000220 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 | c; line) {. | -00000230 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 | // Error: st| -00000240 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 |d.ctype.isdigit | -00000250 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 |at C:\dmd\src\ph| -00000260 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 |obos\std\ctype.d| -00000270 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f |(37) . //| -00000280 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 | conflicts with | -00000290 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 |std.stream.isdig| -000002a0 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 |it at C:\dmd\src| -000002b0 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 |\phobos\std\stre| -000002c0 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 |am.d(2924). | -000002d0 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 | if (!std.ctyp| -000002e0 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a |e.isdigit(c)) {.| -000002f0 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 | if (| -00000300 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 |!ret). | -00000310 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 | ret = line| -00000320 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 |[0..i]; . | -00000330 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 | } . | -00000340 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a | else if (ret).| -00000350 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 | ret | -00000360 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 |~= c; . } | -00000370 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 | . return r| -00000380 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a |et?ret:line;.}..| -00000390 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 |unittest {. a| -000003a0 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 |ssert( stripNonD| -000003b0 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 |igit("asdf") == | -000003c0 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 |"" );. 
asser| -000003d0 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 |t( stripNonDigit| -000003e0 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 |("\'13-=2 4kop")| -000003f0 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a | == "1324" );.| -00000400 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 |}../// Converts | -00000410 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 |a word into a nu| -00000420 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 |mber, ignoring a| -00000430 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 |ll non alpha cha| -00000440 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 |racters .string| -00000450 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 | wordToNum( in s| -00000460 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f |tring word ).{./| -00000470 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 |/ translation ta| -00000480 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b |ble for the task| -00000490 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 | at hand.const c| -000004a0 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 |har[256] TRANSLA| -000004b0 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 |TE = . " | -000004c0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -000004d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 | " | -000004e0 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 | // 0 . " | -000004f0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 | 01| -00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 |23456789 " | -00000510 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 | // 32 . | -00000520 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 |" 57630499617851| -00000530 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 |881234762239 | -00000540 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 | " // 64 . | -00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 | " 5763049961785| -00000560 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 |1881234762239 | -00000570 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 | ". " | -00000580 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000590 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 | ". "| -000005a0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -* -000005c0 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 |". " | -000005d0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -000005e0 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 | " . | -000005f0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | " | -00000600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000610 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 | ";. string | -00000620 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 |ret;. foreach| -00000630 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d |(c; cast(ubyte[]| -00000640 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 |)word). i| -00000650 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 |f (TRANSLATE[c] | -00000660 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 |!= ' '). | -00000670 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 | ret ~= TRANS| -00000680 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 |LATE[c];. ret| -00000690 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 |urn ret;.}..unit| -000006a0 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 |test {. // Test | -000006b0 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 |wordToNum using | -000006c0 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 |the table from t| -000006d0 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 |he task descript| -000006e0 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 |ion.. 
assert( "0| -000006f0 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 |1112223334455666| -00000700 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 |777888999" ==. | -00000710 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 | wordToNum("E | | -00000720 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 |J N Q | R W X | | -00000730 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 |D S Y | F T | A | -00000740 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 |M | C I V | B K | -00000750 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 |U | L O P | G H | -00000760 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 |Z"));. assert( "| -00000770 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 |0111222333445566| -00000780 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a |6777888999" == .| -00000790 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 | wordToNum("e | -000007a0 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 || j n q | r w x | -000007b0 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 || d s y | f t | | -000007c0 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 |a m | c i v | b | -000007d0 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 |k u | l o p | g | -000007e0 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 |h z"));. assert(| -000007f0 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d | "0123456789" ==| -00000800 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 | . wordToNum("| -00000810 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 |0 | 1 | 2 | -00000820 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 | | 3 | 4 | -00000830 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 || 5 | 6 | | -00000840 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 | 7 | 8 | | -00000850 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 | 9"));.}..void | -00000860 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 |main( string[] a| -00000870 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 |rgs ).{. // T| -00000880 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 |his associative | -00000890 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d |array maps a num| -000008a0 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 |ber to an array | -000008b0 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 |of words. . | -000008c0 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 | stringarray[st| -000008d0 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 |ring] num2wor| -000008e0 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 |ds;.. foreach| -000008f0 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 |(string word; ne| -00000900 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 |w BufferedFile("| -00000910 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 |dictionary.txt" | -00000920 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 |) ). num2| -00000930 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d |words[ wordToNum| -00000940 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 |(word) ] ~= word| -00000950 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 |.dup; // | -00000960 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f |must dup.. //| -00000970 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 |/ Finds all alte| -00000980 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 |rnatives for the| -00000990 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 | given number. 
| -000009a0 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 | /// (should ha| -000009b0 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 |ve been stripped| -000009c0 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 | from non-digit | -000009d0 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 |characters). | -000009e0 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e |stringarray _Fin| -000009f0 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e |dWords( string n| -00000a00 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 |umbers, bool dig| -00000a10 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a |itok ). in {.| -00000a20 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e | assert(n| -00000a30 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 |umbers.length > | -00000a40 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 | 0); . } | -00000a50 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c | . out(resul| -00000a60 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 |t) {. for| -00000a70 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 |each (a; result)| -00000a80 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 |. ass| -00000a90 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 |ert( wordToNum(a| -00000aa0 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a |) == numbers );.| -00000ab0 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f | } . bo| -00000ac0 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 |dy {. str| -00000ad0 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 |ingarray ret;. | -00000ae0 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 | bool found| -00000af0 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 |word = false;. | -00000b00 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 | for (uint | -00000b10 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e |t=1; t<=numbers.| -00000b20 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 |length; ++t) {. | -00000b30 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 | auto | -00000b40 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e |alternatives = n| -00000b50 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 |umbers[0..t] in | -00000b60 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 |num2words;. | -00000b70 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 | if (!alte| -00000b80 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 |rnatives). | -00000b90 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e | contin| -00000ba0 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 |ue;. | -00000bb0 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 |foundword = true| -00000bc0 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 |;. if| -00000bd0 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 | (numbers.length| -00000be0 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 | > t) {. | -00000bf0 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 | // Comb| -00000c00 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 |ine all current | -00000c10 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 |alternatives wit| -00000c20 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 |h all alternativ| -00000c30 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 |es . | -00000c40 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 | // of th| -00000c50 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 |e rest (next pie| -00000c60 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 |ce can start wit| -00000c70 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 |h a digit) | -00000c80 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 | . 
| -00000c90 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 | foreach| -00000ca0 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 | (a2; _FindWords| -00000cb0 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c |( numbers[t..$],| -00000cc0 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 | true ) ). | -00000cd0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000ce0 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 | foreach(a1; *a| -00000cf0 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 |lternatives). | -00000d00 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000d10 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 | ret ~= a1 ~ | -00000d20 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 |" " ~ a2;. | -00000d30 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 | }. | -00000d40 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 | else . | -00000d50 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 | ret| -00000d60 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 | ~= *alternative| -00000d70 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 |s; // append | -00000d80 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 |these alternativ| -00000d90 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 |es. }. | -00000da0 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b | // Try to k| -00000db0 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c |eep 1 digit, onl| -00000dc0 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 |y if we're allow| -00000dd0 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a |ed and no other.| -00000de0 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 | // alter| -00000df0 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 |natives were fou| -00000e00 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 |nd. // Te| -00000e10 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 |sting "ret.lengt| -00000e20 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 |h" makes more se| -00000e30 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 |nse than testing| -00000e40 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 | "foundword",. | -00000e50 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 | // but the| -00000e60 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 | other implement| -00000e70 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 |ations seem to d| -00000e80 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 |o just this.. | -00000e90 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b | if (digitok| -00000ea0 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 | && !foundword) | -00000eb0 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d |{ //ret.length =| -00000ec0 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 |= 0 . | -00000ed0 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e | if(numbers.len| -00000ee0 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 |gth > 1) {. | -00000ef0 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 | // C| -00000f00 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 |ombine 1 digit w| -00000f10 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 |ith all altenati| -00000f20 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 |ves from the res| -00000f30 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 |t . | -00000f40 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 | // (next p| -00000f50 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 |iece can not sta| -00000f60 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 |rt with a digit)| -00000f70 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 | . 
| -00000f80 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 | forea| -00000f90 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 |ch (a; _FindWord| -00000fa0 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d |s( numbers[1..$]| -00000fb0 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 |, false ) ). | -00000fc0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | | -00000fd0 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 |ret ~= numbers[0| -00000fe0 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a |..1] ~ " " ~ a;.| -00000ff0 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 | } | -00001000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c | . el| -00001010 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 |se . | -00001020 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 | ret ~= nu| -00001030 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 |mbers[0..1]; | -00001040 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 |// just append t| -00001050 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 |his digit | -00001060 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d | . }| -00001070 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 | . ret| -00001080 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a |urn ret;. }..| -00001090 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 | /// (This fu| -000010a0 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e |nction was inlin| -000010b0 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e |ed in the origin| -000010c0 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 |al program) . | -000010d0 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 | /// Finds all a| -000010e0 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 |lternatives for | -000010f0 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 |the given phone | -00001100 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 |number . /// | -00001110 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f |Returns: array o| -00001120 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 |f strings . s| -00001130 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 |tringarray FindW| -00001140 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f |ords( string pho| -00001150 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 |ne_number ). | -00001160 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 |{. if (!p| -00001170 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 |hone_number.leng| -00001180 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 |th). | -00001190 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 |return null;. | -000011a0 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 | // Strip th| -000011b0 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 |e non-digit char| -000011c0 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 |acters from the | -000011d0 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e |phone number, an| -000011e0 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 |d. // pas| -000011f0 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 |s it to the recu| -00001200 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 |rsive function (| -00001210 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 |leading digit is| -00001220 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 | allowed). | -00001230 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f | return _FindWo| -00001240 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 |rds( stripNonDig| -00001250 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 |it(phone_number)| -00001260 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 |, true ); . | -00001270 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 | } . . 
| -00001280 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f | // Read the pho| -00001290 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a |ne numbers .| -000012a0 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 | foreach(stri| -000012b0 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 |ng phone; new Bu| -000012c0 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 |fferedFile("inpu| -000012d0 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 |t.txt" ) ). | -000012e0 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 | foreach(alt| -000012f0 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f |ernative; FindWo| -00001300 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 |rds( phone ) ). | -00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 | write| -00001320 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c |fln(phone, ": ",| -00001330 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a | alternative );.| -00001340 7d 0a 0a |}..| -00001343 diff --git a/tests/examplefiles/hexdump_hexcat b/tests/examplefiles/hexdump_hexcat deleted file mode 100644 index 522074cf..00000000 --- a/tests/examplefiles/hexdump_hexcat +++ /dev/null @@ -1,247 +0,0 @@ -00000000 2F 2F 20 43 72 65 61 74 65 64 20 62 79 20 4C 69 6F 6E 65 6C //.Created.by.Lionel -00000014 6C 6F 20 4C 75 6E 65 73 75 20 61 6E 64 20 70 6C 61 63 65 64 lo.Lunesu.and.placed -00000028 20 69 6E 20 74 68 65 20 70 75 62 6C 69 63 20 64 6F 6D 61 69 .in.the.public.domai -0000003C 6E 2E 0A 2F 2F 20 54 68 69 73 20 66 69 6C 65 20 68 61 73 20 n..//.This.file.has. -00000050 62 65 65 6E 20 6D 6F 64 69 66 69 65 64 20 66 72 6F 6D 20 69 been.modified.from.i -00000064 74 73 20 6F 72 69 67 69 6E 61 6C 20 76 65 72 73 69 6F 6E 2E ts.original.version. -00000078 0A 2F 2F 20 49 74 20 68 61 73 20 62 65 65 6E 20 66 6F 72 6D .//.It.has.been.form -0000008C 61 74 74 65 64 20 74 6F 20 66 69 74 20 79 6F 75 72 20 73 63 atted.to.fit.your.sc -000000A0 72 65 65 6E 2E 0A 6D 6F 64 75 6C 65 20 70 68 6F 6E 65 6E 6F reen..module.phoneno -000000B4 3B 20 20 20 20 20 2F 2F 20 6F 70 74 69 6F 6E 61 6C 0A 69 6D ;.....//.optional.im -000000C8 70 6F 72 74 20 73 74 64 2E 73 74 64 69 6F 3B 20 20 20 2F 2F port.std.stdio;...// -000000DC 20 77 72 69 74 65 66 6C 6E 20 20 20 20 20 0A 69 6D 70 6F 72 .writefln......impor -000000F0 74 20 73 74 64 2E 63 74 79 70 65 3B 20 20 20 2F 2F 20 69 73 t.std.ctype;...//.is -00000104 64 69 67 69 74 20 20 20 20 20 0A 69 6D 70 6F 72 74 20 73 74 digit......import.st -00000118 64 2E 73 74 72 65 61 6D 3B 20 20 2F 2F 20 42 75 66 66 65 72 d.stream;..//.Buffer -0000012C 65 64 46 69 6C 65 0A 0A 2F 2F 20 4A 75 73 74 20 66 6F 72 20 edFile..//.Just.for. -00000140 72 65 61 64 61 62 69 6C 69 74 79 20 28 69 6D 61 67 69 6E 65 readability.(imagine -00000154 20 63 68 61 72 5B 5D 5B 5D 5B 63 68 61 72 5B 5D 5D 29 20 20 .char[][][char[]]).. -00000168 20 20 0A 61 6C 69 61 73 20 63 68 61 72 5B 5D 20 73 74 72 69 ...alias.char[].stri -0000017C 6E 67 3B 0A 61 6C 69 61 73 20 73 74 72 69 6E 67 5B 5D 20 73 ng;.alias.string[].s -00000190 74 72 69 6E 67 61 72 72 61 79 3B 0A 0A 2F 2F 2F 20 53 74 72 tringarray;..///.Str -000001A4 69 70 73 20 6E 6F 6E 2D 64 69 67 69 74 20 63 68 61 72 61 63 ips.non-digit.charac -000001B8 74 65 72 73 20 66 72 6F 6D 20 74 68 65 20 73 74 72 69 6E 67 ters.from.the.string -000001CC 20 28 43 4F 57 29 0A 73 74 72 69 6E 67 20 73 74 72 69 70 4E .(COW).string.stripN -000001E0 6F 6E 44 69 67 69 74 28 20 69 6E 20 73 74 72 69 6E 67 20 6C onDigit(.in.string.l -000001F4 69 6E 65 20 29 20 0A 7B 0A 20 20 20 20 73 74 72 69 6E 67 20 ine.)..{.....string. 
-00000208 72 65 74 3B 0A 20 20 20 20 66 6F 72 65 61 63 68 28 75 69 6E ret;.....foreach(uin -0000021C 74 20 69 2C 20 63 3B 20 6C 69 6E 65 29 20 7B 0A 20 20 20 20 t.i,.c;.line).{..... -00000230 20 20 20 20 2F 2F 20 45 72 72 6F 72 3A 20 73 74 64 2E 63 74 ....//.Error:.std.ct -00000244 79 70 65 2E 69 73 64 69 67 69 74 20 61 74 20 43 3A 5C 64 6D ype.isdigit.at.C:\dm -00000258 64 5C 73 72 63 5C 70 68 6F 62 6F 73 5C 73 74 64 5C 63 74 79 d\src\phobos\std\cty -0000026C 70 65 2E 64 28 33 37 29 20 0A 20 20 20 20 20 20 20 20 2F 2F pe.d(37)..........// -00000280 20 63 6F 6E 66 6C 69 63 74 73 20 77 69 74 68 20 73 74 64 2E .conflicts.with.std. -00000294 73 74 72 65 61 6D 2E 69 73 64 69 67 69 74 20 61 74 20 43 3A stream.isdigit.at.C: -000002A8 5C 64 6D 64 5C 73 72 63 5C 70 68 6F 62 6F 73 5C 73 74 64 5C \dmd\src\phobos\std\ -000002BC 73 74 72 65 61 6D 2E 64 28 32 39 32 34 29 0A 20 20 20 20 20 stream.d(2924)...... -000002D0 20 20 20 69 66 20 28 21 73 74 64 2E 63 74 79 70 65 2E 69 73 ...if.(!std.ctype.is -000002E4 64 69 67 69 74 28 63 29 29 20 7B 0A 20 20 20 20 20 20 20 20 digit(c)).{......... -000002F8 20 20 20 20 69 66 20 28 21 72 65 74 29 0A 20 20 20 20 20 20 ....if.(!ret)....... -0000030C 20 20 20 20 20 20 20 20 20 20 72 65 74 20 3D 20 6C 69 6E 65 ..........ret.=.line -00000320 5B 30 2E 2E 69 5D 3B 20 20 20 20 0A 20 20 20 20 20 20 20 20 [0..i];............. -00000334 7D 20 20 20 20 0A 20 20 20 20 20 20 20 20 65 6C 73 65 20 69 }.............else.i -00000348 66 20 28 72 65 74 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 f.(ret)............. -0000035C 72 65 74 20 7E 3D 20 63 3B 20 20 20 20 0A 20 20 20 20 7D 20 ret.~=.c;.........}. -00000370 20 20 20 0A 20 20 20 20 72 65 74 75 72 6E 20 72 65 74 3F 72 ........return.ret?r -00000384 65 74 3A 6C 69 6E 65 3B 0A 7D 0A 0A 75 6E 69 74 74 65 73 74 et:line;.}..unittest -00000398 20 7B 0A 20 20 20 20 61 73 73 65 72 74 28 20 73 74 72 69 70 .{.....assert(.strip -000003AC 4E 6F 6E 44 69 67 69 74 28 22 61 73 64 66 22 29 20 3D 3D 20 NonDigit("asdf").==. -000003C0 22 22 20 20 29 3B 0A 20 20 20 20 61 73 73 65 72 74 28 20 73 ""..);.....assert(.s -000003D4 74 72 69 70 4E 6F 6E 44 69 67 69 74 28 22 5C 27 31 33 2D 3D tripNonDigit("\'13-= -000003E8 32 20 34 6B 6F 70 22 29 20 3D 3D 20 20 22 31 33 32 34 22 20 2.4kop").==.."1324". -000003FC 20 29 3B 0A 7D 0A 0A 2F 2F 2F 20 43 6F 6E 76 65 72 74 73 20 .);.}..///.Converts. -00000410 61 20 77 6F 72 64 20 69 6E 74 6F 20 61 20 6E 75 6D 62 65 72 a.word.into.a.number -00000424 2C 20 69 67 6E 6F 72 69 6E 67 20 61 6C 6C 20 6E 6F 6E 20 61 ,.ignoring.all.non.a -00000438 6C 70 68 61 20 63 68 61 72 61 63 74 65 72 73 20 20 0A 73 74 lpha.characters...st -0000044C 72 69 6E 67 20 77 6F 72 64 54 6F 4E 75 6D 28 20 69 6E 20 73 ring.wordToNum(.in.s -00000460 74 72 69 6E 67 20 77 6F 72 64 20 29 0A 7B 0A 2F 2F 20 74 72 tring.word.).{.//.tr -00000474 61 6E 73 6C 61 74 69 6F 6E 20 74 61 62 6C 65 20 66 6F 72 20 anslation.table.for. -00000488 74 68 65 20 74 61 73 6B 20 61 74 20 68 61 6E 64 0A 63 6F 6E the.task.at.hand.con -0000049C 73 74 20 63 68 61 72 5B 32 35 36 5D 20 54 52 41 4E 53 4C 41 st.char[256].TRANSLA -000004B0 54 45 20 3D 20 20 20 20 0A 20 20 20 20 22 20 20 20 20 20 20 TE.=........."...... -000004C4 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... -000004D8 20 20 20 20 20 20 22 20 20 2F 2F 20 30 20 20 20 0A 20 20 20 ......"..//.0....... -000004EC 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 ."................01 -00000500 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 20 2F 2F 20 23456789......"..//. 
-00000514 33 32 20 20 20 20 20 0A 20 20 20 20 22 20 35 37 36 33 30 34 32..........".576304 -00000528 39 39 36 31 37 38 35 31 38 38 31 32 33 34 37 36 32 32 33 39 99617851881234762239 -0000053C 20 20 20 20 20 22 20 20 2F 2F 20 36 34 20 20 20 0A 20 20 20 ....."..//.64....... -00000550 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 38 38 31 .".57630499617851881 -00000564 32 33 34 37 36 32 32 33 39 20 20 20 20 20 22 0A 20 20 20 20 234762239....."..... -00000578 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 "................... -0000058C 20 20 20 20 20 20 20 20 20 20 20 20 20 22 0A 20 20 20 20 22 ............."....." -000005A0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... -000005B4 20 20 20 20 20 20 20 20 20 20 20 20 22 0A 20 20 20 20 22 20 ............".....". -000005C8 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... -000005DC 20 20 20 20 20 20 20 20 20 20 20 22 20 20 20 20 0A 20 20 20 ..........."........ -000005F0 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .".................. -00000604 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 3B 0A 20 20 20 ..............";.... -00000618 20 73 74 72 69 6E 67 20 72 65 74 3B 0A 20 20 20 20 66 6F 72 .string.ret;.....for -0000062C 65 61 63 68 28 63 3B 20 63 61 73 74 28 75 62 79 74 65 5B 5D each(c;.cast(ubyte[] -00000640 29 77 6F 72 64 29 0A 20 20 20 20 20 20 20 20 69 66 20 28 54 )word).........if.(T -00000654 52 41 4E 53 4C 41 54 45 5B 63 5D 20 21 3D 20 27 20 27 29 0A RANSLATE[c].!=.'.'). -00000668 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 54 ............ret.~=.T -0000067C 52 41 4E 53 4C 41 54 45 5B 63 5D 3B 0A 20 20 20 20 72 65 74 RANSLATE[c];.....ret -00000690 75 72 6E 20 72 65 74 3B 0A 7D 0A 0A 75 6E 69 74 74 65 73 74 urn.ret;.}..unittest -000006A4 20 7B 0A 20 2F 2F 20 54 65 73 74 20 77 6F 72 64 54 6F 4E 75 .{..//.Test.wordToNu -000006B8 6D 20 75 73 69 6E 67 20 74 68 65 20 74 61 62 6C 65 20 66 72 m.using.the.table.fr -000006CC 6F 6D 20 74 68 65 20 74 61 73 6B 20 64 65 73 63 72 69 70 74 om.the.task.descript -000006E0 69 6F 6E 2E 0A 20 61 73 73 65 72 74 28 20 22 30 31 31 31 32 ion...assert(."01112 -000006F4 32 32 33 33 33 34 34 35 35 36 36 36 37 37 37 38 38 38 39 39 22333445566677788899 -00000708 39 22 20 3D 3D 0A 20 20 20 77 6F 72 64 54 6F 4E 75 6D 28 22 9".==....wordToNum(" -0000071C 45 20 7C 20 4A 20 4E 20 51 20 7C 20 52 20 57 20 58 20 7C 20 E.|.J.N.Q.|.R.W.X.|. -00000730 44 20 53 20 59 20 7C 20 46 20 54 20 7C 20 41 20 4D 20 7C 20 D.S.Y.|.F.T.|.A.M.|. -00000744 43 20 49 20 56 20 7C 20 42 20 4B 20 55 20 7C 20 4C 20 4F 20 C.I.V.|.B.K.U.|.L.O. -00000758 50 20 7C 20 47 20 48 20 5A 22 29 29 3B 0A 20 61 73 73 65 72 P.|.G.H.Z"));..asser -0000076C 74 28 20 22 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 t(."0111222333445566 -00000780 36 37 37 37 38 38 38 39 39 39 22 20 3D 3D 20 0A 20 20 20 77 6777888999".==.....w -00000794 6F 72 64 54 6F 4E 75 6D 28 22 65 20 7C 20 6A 20 6E 20 71 20 ordToNum("e.|.j.n.q. -000007A8 7C 20 72 20 77 20 78 20 7C 20 64 20 73 20 79 20 7C 20 66 20 |.r.w.x.|.d.s.y.|.f. -000007BC 74 20 7C 20 61 20 6D 20 7C 20 63 20 69 20 76 20 7C 20 62 20 t.|.a.m.|.c.i.v.|.b. -000007D0 6B 20 75 20 7C 20 6C 20 6F 20 70 20 7C 20 67 20 68 20 7A 22 k.u.|.l.o.p.|.g.h.z" -000007E4 29 29 3B 0A 20 61 73 73 65 72 74 28 20 22 30 31 32 33 34 35 ));..assert(."012345 -000007F8 36 37 38 39 22 20 3D 3D 20 0A 20 20 20 77 6F 72 64 54 6F 4E 6789".==.....wordToN -0000080C 75 6D 28 22 30 20 7C 20 20 20 31 20 20 20 7C 20 20 20 32 20 um("0.|...1...|...2. 
-00000820 20 20 7C 20 20 20 33 20 20 20 7C 20 20 34 20 20 7C 20 20 35 ..|...3...|..4..|..5 -00000834 20 20 7C 20 20 20 36 20 20 20 7C 20 20 20 37 20 20 20 7C 20 ..|...6...|...7...|. -00000848 20 20 38 20 20 20 7C 20 20 20 39 22 29 29 3B 0A 7D 0A 0A 76 ..8...|...9"));.}..v -0000085C 6F 69 64 20 6D 61 69 6E 28 20 73 74 72 69 6E 67 5B 5D 20 61 oid.main(.string[].a -00000870 72 67 73 20 29 0A 7B 0A 20 20 20 20 2F 2F 20 54 68 69 73 20 rgs.).{.....//.This. -00000884 61 73 73 6F 63 69 61 74 69 76 65 20 61 72 72 61 79 20 6D 61 associative.array.ma -00000898 70 73 20 61 20 6E 75 6D 62 65 72 20 74 6F 20 61 6E 20 61 72 ps.a.number.to.an.ar -000008AC 72 61 79 20 6F 66 20 77 6F 72 64 73 2E 20 20 20 20 0A 20 20 ray.of.words........ -000008C0 20 20 73 74 72 69 6E 67 61 72 72 61 79 5B 73 74 72 69 6E 67 ..stringarray[string -000008D4 5D 20 20 20 20 6E 75 6D 32 77 6F 72 64 73 3B 0A 0A 20 20 20 ]....num2words;..... -000008E8 20 66 6F 72 65 61 63 68 28 73 74 72 69 6E 67 20 77 6F 72 64 .foreach(string.word -000008FC 3B 20 6E 65 77 20 42 75 66 66 65 72 65 64 46 69 6C 65 28 22 ;.new.BufferedFile(" -00000910 64 69 63 74 69 6F 6E 61 72 79 2E 74 78 74 22 20 29 20 29 0A dictionary.txt".).). -00000924 20 20 20 20 20 20 20 20 6E 75 6D 32 77 6F 72 64 73 5B 20 77 ........num2words[.w -00000938 6F 72 64 54 6F 4E 75 6D 28 77 6F 72 64 29 20 5D 20 7E 3D 20 ordToNum(word).].~=. -0000094C 77 6F 72 64 2E 64 75 70 3B 20 20 20 20 20 20 20 20 2F 2F 20 word.dup;........//. -00000960 6D 75 73 74 20 64 75 70 0A 0A 20 20 20 20 2F 2F 2F 20 46 69 must.dup......///.Fi -00000974 6E 64 73 20 61 6C 6C 20 61 6C 74 65 72 6E 61 74 69 76 65 73 nds.all.alternatives -00000988 20 66 6F 72 20 74 68 65 20 67 69 76 65 6E 20 6E 75 6D 62 65 .for.the.given.numbe -0000099C 72 0A 20 20 20 20 2F 2F 2F 20 28 73 68 6F 75 6C 64 20 68 61 r.....///.(should.ha -000009B0 76 65 20 62 65 65 6E 20 73 74 72 69 70 70 65 64 20 66 72 6F ve.been.stripped.fro -000009C4 6D 20 6E 6F 6E 2D 64 69 67 69 74 20 63 68 61 72 61 63 74 65 m.non-digit.characte -000009D8 72 73 29 0A 20 20 20 20 73 74 72 69 6E 67 61 72 72 61 79 20 rs).....stringarray. -000009EC 5F 46 69 6E 64 57 6F 72 64 73 28 20 73 74 72 69 6E 67 20 6E _FindWords(.string.n -00000A00 75 6D 62 65 72 73 2C 20 62 6F 6F 6C 20 64 69 67 69 74 6F 6B umbers,.bool.digitok -00000A14 20 29 0A 20 20 20 20 69 6E 20 7B 0A 20 20 20 20 20 20 20 20 .).....in.{......... -00000A28 61 73 73 65 72 74 28 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 assert(numbers.lengt -00000A3C 68 20 3E 20 20 30 29 3B 20 20 20 20 0A 20 20 20 20 7D 20 20 h.>..0);.........}.. -00000A50 20 20 0A 20 20 20 20 6F 75 74 28 72 65 73 75 6C 74 29 20 7B .......out(result).{ -00000A64 0A 20 20 20 20 20 20 20 20 66 6F 72 65 61 63 68 20 28 61 3B .........foreach.(a; -00000A78 20 72 65 73 75 6C 74 29 0A 20 20 20 20 20 20 20 20 20 20 20 .result)............ -00000A8C 20 61 73 73 65 72 74 28 20 77 6F 72 64 54 6F 4E 75 6D 28 61 .assert(.wordToNum(a -00000AA0 29 20 3D 3D 20 6E 75 6D 62 65 72 73 20 29 3B 0A 20 20 20 20 ).==.numbers.);..... -00000AB4 7D 20 20 20 20 0A 20 20 20 20 62 6F 64 79 20 7B 0A 20 20 20 }.........body.{.... -00000AC8 20 20 20 20 20 73 74 72 69 6E 67 61 72 72 61 79 20 72 65 74 .....stringarray.ret -00000ADC 3B 0A 20 20 20 20 20 20 20 20 62 6F 6F 6C 20 66 6F 75 6E 64 ;.........bool.found -00000AF0 77 6F 72 64 20 3D 20 66 61 6C 73 65 3B 0A 20 20 20 20 20 20 word.=.false;....... 
-00000B04 20 20 66 6F 72 20 28 75 69 6E 74 20 74 3D 31 3B 20 74 3C 3D ..for.(uint.t=1;.t<= -00000B18 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 68 3B 20 2B 2B 74 29 numbers.length;.++t) -00000B2C 20 7B 0A 20 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6F 20 .{.............auto. -00000B40 61 6C 74 65 72 6E 61 74 69 76 65 73 20 3D 20 6E 75 6D 62 65 alternatives.=.numbe -00000B54 72 73 5B 30 2E 2E 74 5D 20 69 6E 20 6E 75 6D 32 77 6F 72 64 rs[0..t].in.num2word -00000B68 73 3B 0A 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 21 s;.............if.(! -00000B7C 61 6C 74 65 72 6E 61 74 69 76 65 73 29 0A 20 20 20 20 20 20 alternatives)....... -00000B90 20 20 20 20 20 20 20 20 20 20 63 6F 6E 74 69 6E 75 65 3B 0A ..........continue;. -00000BA4 20 20 20 20 20 20 20 20 20 20 20 20 66 6F 75 6E 64 77 6F 72 ............foundwor -00000BB8 64 20 3D 20 74 72 75 65 3B 0A 20 20 20 20 20 20 20 20 20 20 d.=.true;........... -00000BCC 20 20 69 66 20 28 6E 75 6D 62 65 72 73 2E 6C 65 6E 67 74 68 ..if.(numbers.length -00000BE0 20 3E 20 20 74 29 20 7B 0A 20 20 20 20 20 20 20 20 20 20 20 .>..t).{............ -00000BF4 20 20 20 20 20 2F 2F 20 43 6F 6D 62 69 6E 65 20 61 6C 6C 20 .....//.Combine.all. -00000C08 63 75 72 72 65 6E 74 20 61 6C 74 65 72 6E 61 74 69 76 65 73 current.alternatives -00000C1C 20 77 69 74 68 20 61 6C 6C 20 61 6C 74 65 72 6E 61 74 69 76 .with.all.alternativ -00000C30 65 73 20 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 es.................. -00000C44 20 20 20 20 2F 2F 20 6F 66 20 74 68 65 20 72 65 73 74 20 28 ....//.of.the.rest.( -00000C58 6E 65 78 74 20 70 69 65 63 65 20 63 61 6E 20 73 74 61 72 74 next.piece.can.start -00000C6C 20 77 69 74 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 .with.a.digit)...... -00000C80 20 20 20 20 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 .................... -00000C94 20 20 20 20 20 66 6F 72 65 61 63 68 20 28 61 32 3B 20 5F 46 .....foreach.(a2;._F -00000CA8 69 6E 64 57 6F 72 64 73 28 20 6E 75 6D 62 65 72 73 5B 74 2E indWords(.numbers[t. -00000CBC 2E 24 5D 2C 20 74 72 75 65 20 20 20 20 20 29 20 29 0A 20 20 .$],.true.....).)... -00000CD0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 66 6F ..................fo -00000CE4 72 65 61 63 68 28 61 31 3B 20 2A 61 6C 74 65 72 6E 61 74 69 reach(a1;.*alternati -00000CF8 76 65 73 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 ves)................ -00000D0C 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 61 31 20 7E 20 ........ret.~=.a1.~. -00000D20 22 20 22 20 7E 20 61 32 3B 0A 20 20 20 20 20 20 20 20 20 20 ".".~.a2;........... -00000D34 20 20 7D 0A 20 20 20 20 20 20 20 20 20 20 20 20 65 6C 73 65 ..}.............else -00000D48 20 20 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 .................... -00000D5C 20 72 65 74 20 7E 3D 20 2A 61 6C 74 65 72 6E 61 74 69 76 65 .ret.~=.*alternative -00000D70 73 3B 20 20 20 20 2F 2F 20 61 70 70 65 6E 64 20 74 68 65 73 s;....//.append.thes -00000D84 65 20 61 6C 74 65 72 6E 61 74 69 76 65 73 0A 20 20 20 20 20 e.alternatives...... -00000D98 20 20 20 7D 0A 20 20 20 20 20 20 20 20 2F 2F 20 54 72 79 20 ...}.........//.Try. -00000DAC 74 6F 20 6B 65 65 70 20 31 20 64 69 67 69 74 2C 20 6F 6E 6C to.keep.1.digit,.onl -00000DC0 79 20 69 66 20 77 65 27 72 65 20 61 6C 6C 6F 77 65 64 20 61 y.if.we're.allowed.a -00000DD4 6E 64 20 6E 6F 20 6F 74 68 65 72 0A 20 20 20 20 20 20 20 20 nd.no.other......... 
-00000DE8 2F 2F 20 61 6C 74 65 72 6E 61 74 69 76 65 73 20 77 65 72 65 //.alternatives.were -00000DFC 20 66 6F 75 6E 64 0A 20 20 20 20 20 20 20 20 2F 2F 20 54 65 .found.........//.Te -00000E10 73 74 69 6E 67 20 22 72 65 74 2E 6C 65 6E 67 74 68 22 20 6D sting."ret.length".m -00000E24 61 6B 65 73 20 6D 6F 72 65 20 73 65 6E 73 65 20 74 68 61 6E akes.more.sense.than -00000E38 20 74 65 73 74 69 6E 67 20 22 66 6F 75 6E 64 77 6F 72 64 22 .testing."foundword" -00000E4C 2C 0A 20 20 20 20 20 20 20 20 2F 2F 20 62 75 74 20 74 68 65 ,.........//.but.the -00000E60 20 6F 74 68 65 72 20 69 6D 70 6C 65 6D 65 6E 74 61 74 69 6F .other.implementatio -00000E74 6E 73 20 73 65 65 6D 20 74 6F 20 64 6F 20 6A 75 73 74 20 74 ns.seem.to.do.just.t -00000E88 68 69 73 2E 0A 20 20 20 20 20 20 20 20 69 66 20 28 64 69 67 his..........if.(dig -00000E9C 69 74 6F 6B 20 26 26 20 21 66 6F 75 6E 64 77 6F 72 64 29 20 itok.&&.!foundword). -00000EB0 7B 20 2F 2F 72 65 74 2E 6C 65 6E 67 74 68 20 3D 3D 20 30 20 {.//ret.length.==.0. -00000EC4 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 69 66 28 6E 75 6D ..............if(num -00000ED8 62 65 72 73 2E 6C 65 6E 67 74 68 20 3E 20 20 31 29 20 7B 0A bers.length.>..1).{. -00000EEC 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 2F 2F 20 43 ................//.C -00000F00 6F 6D 62 69 6E 65 20 31 20 64 69 67 69 74 20 77 69 74 68 20 ombine.1.digit.with. -00000F14 61 6C 6C 20 61 6C 74 65 6E 61 74 69 76 65 73 20 66 72 6F 6D all.altenatives.from -00000F28 20 74 68 65 20 72 65 73 74 20 20 20 20 0A 20 20 20 20 20 20 .the.rest........... -00000F3C 20 20 20 20 20 20 20 20 20 20 2F 2F 20 28 6E 65 78 74 20 70 ..........//.(next.p -00000F50 69 65 63 65 20 63 61 6E 20 6E 6F 74 20 73 74 61 72 74 20 77 iece.can.not.start.w -00000F64 69 74 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 20 20 ith.a.digit)........ -00000F78 20 20 0A 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 66 ...................f -00000F8C 6F 72 65 61 63 68 20 28 61 3B 20 5F 46 69 6E 64 57 6F 72 64 oreach.(a;._FindWord -00000FA0 73 28 20 6E 75 6D 62 65 72 73 5B 31 2E 2E 24 5D 2C 20 66 61 s(.numbers[1..$],.fa -00000FB4 6C 73 65 20 29 20 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 lse.).)............. -00000FC8 20 20 20 20 20 20 20 20 72 65 74 20 7E 3D 20 6E 75 6D 62 65 ........ret.~=.numbe -00000FDC 72 73 5B 30 2E 2E 31 5D 20 7E 20 22 20 22 20 7E 20 61 3B 0A rs[0..1].~.".".~.a;. -00000FF0 20 20 20 20 20 20 20 20 20 20 20 20 7D 20 20 20 20 0A 20 20 ............}....... -00001004 20 20 20 20 20 20 20 20 20 20 65 6C 73 65 20 20 20 20 0A 20 ..........else...... -00001018 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 7E ...............ret.~ -0000102C 3D 20 6E 75 6D 62 65 72 73 5B 30 2E 2E 31 5D 3B 20 20 20 20 =.numbers[0..1];.... -00001040 2F 2F 20 6A 75 73 74 20 61 70 70 65 6E 64 20 74 68 69 73 20 //.just.append.this. -00001054 64 69 67 69 74 20 20 20 20 20 20 20 20 20 20 20 20 20 0A 20 digit............... -00001068 20 20 20 20 20 20 20 7D 20 20 20 20 0A 20 20 20 20 20 20 20 .......}............ -0000107C 20 72 65 74 75 72 6E 20 72 65 74 3B 0A 20 20 20 20 7D 0A 0A .return.ret;.....}.. -00001090 20 20 20 20 2F 2F 2F 20 28 54 68 69 73 20 66 75 6E 63 74 69 ....///.(This.functi -000010A4 6F 6E 20 77 61 73 20 69 6E 6C 69 6E 65 64 20 69 6E 20 74 68 on.was.inlined.in.th -000010B8 65 20 6F 72 69 67 69 6E 61 6C 20 70 72 6F 67 72 61 6D 29 20 e.original.program). -000010CC 0A 20 20 20 20 2F 2F 2F 20 46 69 6E 64 73 20 61 6C 6C 20 61 .....///.Finds.all.a -000010E0 6C 74 65 72 6E 61 74 69 76 65 73 20 66 6F 72 20 74 68 65 20 lternatives.for.the. 
-000010F4 67 69 76 65 6E 20 70 68 6F 6E 65 20 6E 75 6D 62 65 72 20 0A given.phone.number.. -00001108 20 20 20 20 2F 2F 2F 20 52 65 74 75 72 6E 73 3A 20 61 72 72 ....///.Returns:.arr -0000111C 61 79 20 6F 66 20 73 74 72 69 6E 67 73 20 0A 20 20 20 20 73 ay.of.strings......s -00001130 74 72 69 6E 67 61 72 72 61 79 20 46 69 6E 64 57 6F 72 64 73 tringarray.FindWords -00001144 28 20 73 74 72 69 6E 67 20 70 68 6F 6E 65 5F 6E 75 6D 62 65 (.string.phone_numbe -00001158 72 20 29 0A 20 20 20 20 7B 0A 20 20 20 20 20 20 20 20 69 66 r.).....{.........if -0000116C 20 28 21 70 68 6F 6E 65 5F 6E 75 6D 62 65 72 2E 6C 65 6E 67 .(!phone_number.leng -00001180 74 68 29 0A 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 75 th).............retu -00001194 72 6E 20 6E 75 6C 6C 3B 0A 20 20 20 20 20 20 20 20 2F 2F 20 rn.null;.........//. -000011A8 53 74 72 69 70 20 74 68 65 20 6E 6F 6E 2D 64 69 67 69 74 20 Strip.the.non-digit. -000011BC 63 68 61 72 61 63 74 65 72 73 20 66 72 6F 6D 20 74 68 65 20 characters.from.the. -000011D0 70 68 6F 6E 65 20 6E 75 6D 62 65 72 2C 20 61 6E 64 0A 20 20 phone.number,.and... -000011E4 20 20 20 20 20 20 2F 2F 20 70 61 73 73 20 69 74 20 74 6F 20 ......//.pass.it.to. -000011F8 74 68 65 20 72 65 63 75 72 73 69 76 65 20 66 75 6E 63 74 69 the.recursive.functi -0000120C 6F 6E 20 28 6C 65 61 64 69 6E 67 20 64 69 67 69 74 20 69 73 on.(leading.digit.is -00001220 20 61 6C 6C 6F 77 65 64 29 0A 20 20 20 20 20 20 20 20 72 65 .allowed).........re -00001234 74 75 72 6E 20 5F 46 69 6E 64 57 6F 72 64 73 28 20 73 74 72 turn._FindWords(.str -00001248 69 70 4E 6F 6E 44 69 67 69 74 28 70 68 6F 6E 65 5F 6E 75 6D ipNonDigit(phone_num -0000125C 62 65 72 29 2C 20 74 72 75 65 20 29 3B 20 20 20 20 0A 20 20 ber),.true.);....... -00001270 20 20 7D 20 20 20 20 0A 20 20 20 20 0A 20 20 20 20 2F 2F 20 ..}..............//. -00001284 52 65 61 64 20 74 68 65 20 70 68 6F 6E 65 20 6E 75 6D 62 65 Read.the.phone.numbe -00001298 72 73 20 20 20 20 20 0A 20 20 20 20 66 6F 72 65 61 63 68 28 rs..........foreach( -000012AC 73 74 72 69 6E 67 20 70 68 6F 6E 65 3B 20 6E 65 77 20 42 75 string.phone;.new.Bu -000012C0 66 66 65 72 65 64 46 69 6C 65 28 22 69 6E 70 75 74 2E 74 78 fferedFile("input.tx -000012D4 74 22 20 20 20 29 20 29 0A 20 20 20 20 20 20 20 20 66 6F 72 t"...).).........for -000012E8 65 61 63 68 28 61 6C 74 65 72 6E 61 74 69 76 65 3B 20 46 69 each(alternative;.Fi -000012FC 6E 64 57 6F 72 64 73 28 20 70 68 6F 6E 65 20 29 20 29 0A 20 ndWords(.phone.).).. -00001310 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 66 6C 6E 28 ...........writefln( -00001324 70 68 6F 6E 65 2C 20 22 3A 20 22 2C 20 61 6C 74 65 72 6E 61 phone,.":.",.alterna -00001338 74 69 76 65 20 29 3B 0A 7D 0A 0A tive.);.}.. 
diff --git a/tests/examplefiles/hexdump_hexdump b/tests/examplefiles/hexdump_hexdump deleted file mode 100644 index 06c2e861..00000000 --- a/tests/examplefiles/hexdump_hexdump +++ /dev/null @@ -1,310 +0,0 @@ -0000000 2f2f 4320 6572 7461 6465 6220 2079 694c -0000010 6e6f 6c65 6f6c 4c20 6e75 7365 2075 6e61 -0000020 2064 6c70 6361 6465 6920 206e 6874 2065 -0000030 7570 6c62 6369 6420 6d6f 6961 2e6e 2f0a -0000040 202f 6854 7369 6620 6c69 2065 6168 2073 -0000050 6562 6e65 6d20 646f 6669 6569 2064 7266 -0000060 6d6f 6920 7374 6f20 6972 6967 616e 206c -0000070 6576 7372 6f69 2e6e 2f0a 202f 7449 6820 -0000080 7361 6220 6565 206e 6f66 6d72 7461 6574 -0000090 2064 6f74 6620 7469 7920 756f 2072 6373 -00000a0 6572 6e65 0a2e 6f6d 7564 656c 7020 6f68 -00000b0 656e 6f6e 203b 2020 2020 2f2f 6f20 7470 -00000c0 6f69 616e 0a6c 6d69 6f70 7472 7320 6474 -00000d0 732e 6474 6f69 203b 2020 2f2f 7720 6972 -00000e0 6574 6c66 206e 2020 2020 690a 706d 726f -00000f0 2074 7473 2e64 7463 7079 3b65 2020 2f20 -0000100 202f 7369 6964 6967 2074 2020 2020 690a -0000110 706d 726f 2074 7473 2e64 7473 6572 6d61 -0000120 203b 2f20 202f 7542 6666 7265 6465 6946 -0000130 656c 0a0a 2f2f 4a20 7375 2074 6f66 2072 -0000140 6572 6461 6261 6c69 7469 2079 6928 616d -0000150 6967 656e 6320 6168 5b72 5b5d 5b5d 6863 -0000160 7261 5d5b 295d 2020 2020 610a 696c 7361 -0000170 6320 6168 5b72 205d 7473 6972 676e 0a3b -0000180 6c61 6169 2073 7473 6972 676e 5d5b 7320 -0000190 7274 6e69 6167 7272 7961 0a3b 2f0a 2f2f -00001a0 5320 7274 7069 2073 6f6e 2d6e 6964 6967 -00001b0 2074 6863 7261 6361 6574 7372 6620 6f72 -00001c0 206d 6874 2065 7473 6972 676e 2820 4f43 -00001d0 2957 730a 7274 6e69 2067 7473 6972 4e70 -00001e0 6e6f 6944 6967 2874 6920 206e 7473 6972 -00001f0 676e 6c20 6e69 2065 2029 7b0a 200a 2020 -0000200 7320 7274 6e69 2067 6572 3b74 200a 2020 -0000210 6620 726f 6165 6863 7528 6e69 2074 2c69 -0000220 6320 203b 696c 656e 2029 0a7b 2020 2020 -0000230 2020 2020 2f2f 4520 7272 726f 203a 7473 -0000240 2e64 7463 7079 2e65 7369 6964 6967 2074 -0000250 7461 4320 5c3a 6d64 5c64 7273 5c63 6870 -0000260 626f 736f 735c 6474 635c 7974 6570 642e -0000270 3328 2937 0a20 2020 2020 2020 2020 2f2f -0000280 6320 6e6f 6c66 6369 7374 7720 7469 2068 -0000290 7473 2e64 7473 6572 6d61 692e 6473 6769 -00002a0 7469 6120 2074 3a43 645c 646d 735c 6372 -00002b0 705c 6f68 6f62 5c73 7473 5c64 7473 6572 -00002c0 6d61 642e 3228 3239 2934 200a 2020 2020 -00002d0 2020 6920 2066 2128 7473 2e64 7463 7079 -00002e0 2e65 7369 6964 6967 2874 2963 2029 0a7b -00002f0 2020 2020 2020 2020 2020 2020 6669 2820 -0000300 7221 7465 0a29 2020 2020 2020 2020 2020 -0000310 2020 2020 2020 6572 2074 203d 696c 656e -0000320 305b 2e2e 5d69 203b 2020 0a20 2020 2020 -0000330 2020 2020 207d 2020 0a20 2020 2020 2020 -0000340 2020 6c65 6573 6920 2066 7228 7465 0a29 -0000350 2020 2020 2020 2020 2020 2020 6572 2074 -0000360 3d7e 6320 203b 2020 0a20 2020 2020 207d -0000370 2020 0a20 2020 2020 6572 7574 6e72 7220 -0000380 7465 723f 7465 6c3a 6e69 3b65 7d0a 0a0a -0000390 6e75 7469 6574 7473 7b20 200a 2020 6120 -00003a0 7373 7265 2874 7320 7274 7069 6f4e 446e -00003b0 6769 7469 2228 7361 6664 2922 3d20 203d -00003c0 2222 2020 3b29 200a 2020 6120 7373 7265 -00003d0 2874 7320 7274 7069 6f4e 446e 6769 7469 -00003e0 2228 275c 3331 3d2d 2032 6b34 706f 2922 -00003f0 3d20 203d 2220 3331 3432 2022 2920 0a3b -0000400 0a7d 2f0a 2f2f 4320 6e6f 6576 7472 2073 -0000410 2061 6f77 6472 6920 746e 206f 2061 756e -0000420 626d 7265 202c 6769 6f6e 6972 676e 6120 -0000430 6c6c 6e20 6e6f 6120 706c 6168 6320 6168 -0000440 6172 
7463 7265 2073 0a20 7473 6972 676e -0000450 7720 726f 5464 4e6f 6d75 2028 6e69 7320 -0000460 7274 6e69 2067 6f77 6472 2920 7b0a 2f0a -0000470 202f 7274 6e61 6c73 7461 6f69 206e 6174 -0000480 6c62 2065 6f66 2072 6874 2065 6174 6b73 -0000490 6120 2074 6168 646e 630a 6e6f 7473 6320 -00004a0 6168 5b72 3532 5d36 5420 4152 534e 414c -00004b0 4554 3d20 2020 2020 200a 2020 2220 2020 -00004c0 2020 2020 2020 2020 2020 2020 2020 2020 -00004d0 2020 2020 2020 2020 2020 2020 2020 2022 -00004e0 2f20 202f 2030 2020 200a 2020 2220 2020 -00004f0 2020 2020 2020 2020 2020 2020 2020 3130 -0000500 3332 3534 3736 3938 2020 2020 2020 2022 -0000510 2f20 202f 3233 2020 2020 0a20 2020 2020 -0000520 2022 3735 3336 3430 3939 3136 3837 3135 -0000530 3838 3231 3433 3637 3232 3933 2020 2020 -0000540 2220 2020 2f2f 3620 2034 2020 200a 2020 -0000550 2220 3520 3637 3033 3934 3639 3731 3538 -0000560 3831 3138 3332 3734 3236 3332 2039 2020 -0000570 2020 0a22 2020 2020 2022 2020 2020 2020 -0000580 2020 2020 2020 2020 2020 2020 2020 2020 -0000590 2020 2020 2020 2020 2220 200a 2020 2220 -00005a0 2020 2020 2020 2020 2020 2020 2020 2020 -* -00005c0 0a22 2020 2020 2022 2020 2020 2020 2020 -00005d0 2020 2020 2020 2020 2020 2020 2020 2020 -00005e0 2020 2020 2020 2220 2020 2020 200a 2020 -00005f0 2220 2020 2020 2020 2020 2020 2020 2020 -0000600 2020 2020 2020 2020 2020 2020 2020 2020 -0000610 2020 3b22 200a 2020 7320 7274 6e69 2067 -0000620 6572 3b74 200a 2020 6620 726f 6165 6863 -0000630 6328 203b 6163 7473 7528 7962 6574 5d5b -0000640 7729 726f 2964 200a 2020 2020 2020 6920 -0000650 2066 5428 4152 534e 414c 4554 635b 205d -0000660 3d21 2720 2720 0a29 2020 2020 2020 2020 -0000670 2020 2020 6572 2074 3d7e 5420 4152 534e -0000680 414c 4554 635b 3b5d 200a 2020 7220 7465 -0000690 7275 206e 6572 3b74 7d0a 0a0a 6e75 7469 -00006a0 6574 7473 7b20 200a 2f2f 5420 7365 2074 -00006b0 6f77 6472 6f54 754e 206d 7375 6e69 2067 -00006c0 6874 2065 6174 6c62 2065 7266 6d6f 7420 -00006d0 6568 7420 7361 206b 6564 6373 6972 7470 -00006e0 6f69 2e6e 200a 7361 6573 7472 2028 3022 -00006f0 3131 3231 3232 3333 3433 3534 3635 3636 -0000700 3737 3837 3838 3939 2239 3d20 0a3d 2020 -0000710 7720 726f 5464 4e6f 6d75 2228 2045 207c -0000720 204a 204e 2051 207c 2052 2057 2058 207c -0000730 2044 2053 2059 207c 2046 2054 207c 2041 -0000740 204d 207c 2043 2049 2056 207c 2042 204b -0000750 2055 207c 204c 204f 2050 207c 2047 2048 -0000760 225a 2929 0a3b 6120 7373 7265 2874 2220 -0000770 3130 3131 3232 3332 3333 3434 3535 3636 -0000780 3736 3737 3838 3938 3939 2022 3d3d 0a20 -0000790 2020 7720 726f 5464 4e6f 6d75 2228 2065 -00007a0 207c 206a 206e 2071 207c 2072 2077 2078 -00007b0 207c 2064 2073 2079 207c 2066 2074 207c -00007c0 2061 206d 207c 2063 2069 2076 207c 2062 -00007d0 206b 2075 207c 206c 206f 2070 207c 2067 -00007e0 2068 227a 2929 0a3b 6120 7373 7265 2874 -00007f0 2220 3130 3332 3534 3736 3938 2022 3d3d -0000800 0a20 2020 7720 726f 5464 4e6f 6d75 2228 -0000810 2030 207c 2020 2031 2020 207c 2020 2032 -0000820 2020 207c 2020 2033 2020 207c 3420 2020 -0000830 207c 3520 2020 207c 2020 2036 2020 207c -0000840 2020 2037 2020 207c 2020 2038 2020 207c -0000850 2020 2239 2929 0a3b 0a7d 760a 696f 2064 -0000860 616d 6e69 2028 7473 6972 676e 5d5b 6120 -0000870 6772 2073 0a29 0a7b 2020 2020 2f2f 5420 -0000880 6968 2073 7361 6f73 6963 7461 7669 2065 -0000890 7261 6172 2079 616d 7370 6120 6e20 6d75 -00008a0 6562 2072 6f74 6120 206e 7261 6172 2079 -00008b0 666f 7720 726f 7364 202e 2020 0a20 2020 -00008c0 2020 7473 6972 676e 7261 6172 5b79 7473 -00008d0 6972 676e 205d 2020 6e20 
6d75 7732 726f -00008e0 7364 0a3b 200a 2020 6620 726f 6165 6863 -00008f0 7328 7274 6e69 2067 6f77 6472 203b 656e -0000900 2077 7542 6666 7265 6465 6946 656c 2228 -0000910 6964 7463 6f69 616e 7972 742e 7478 2022 -0000920 2029 0a29 2020 2020 2020 2020 756e 326d -0000930 6f77 6472 5b73 7720 726f 5464 4e6f 6d75 -0000940 7728 726f 2964 5d20 7e20 203d 6f77 6472 -0000950 642e 7075 203b 2020 2020 2020 2f20 202f -0000960 756d 7473 6420 7075 0a0a 2020 2020 2f2f -0000970 202f 6946 646e 2073 6c61 206c 6c61 6574 -0000980 6e72 7461 7669 7365 6620 726f 7420 6568 -0000990 6720 7669 6e65 6e20 6d75 6562 0a72 2020 -00009a0 2020 2f2f 202f 7328 6f68 6c75 2064 6168 -00009b0 6576 6220 6565 206e 7473 6972 7070 6465 -00009c0 6620 6f72 206d 6f6e 2d6e 6964 6967 2074 -00009d0 6863 7261 6361 6574 7372 0a29 2020 2020 -00009e0 7473 6972 676e 7261 6172 2079 465f 6e69 -00009f0 5764 726f 7364 2028 7473 6972 676e 6e20 -0000a00 6d75 6562 7372 202c 6f62 6c6f 6420 6769 -0000a10 7469 6b6f 2920 200a 2020 6920 206e 0a7b -0000a20 2020 2020 2020 2020 7361 6573 7472 6e28 -0000a30 6d75 6562 7372 6c2e 6e65 7467 2068 203e -0000a40 3020 3b29 2020 2020 200a 2020 7d20 2020 -0000a50 2020 200a 2020 6f20 7475 7228 7365 6c75 -0000a60 2974 7b20 200a 2020 2020 2020 6620 726f -0000a70 6165 6863 2820 3b61 7220 7365 6c75 2974 -0000a80 200a 2020 2020 2020 2020 2020 6120 7373 -0000a90 7265 2874 7720 726f 5464 4e6f 6d75 6128 -0000aa0 2029 3d3d 6e20 6d75 6562 7372 2920 0a3b -0000ab0 2020 2020 207d 2020 0a20 2020 2020 6f62 -0000ac0 7964 7b20 200a 2020 2020 2020 7320 7274 -0000ad0 6e69 6167 7272 7961 7220 7465 0a3b 2020 -0000ae0 2020 2020 2020 6f62 6c6f 6620 756f 646e -0000af0 6f77 6472 3d20 6620 6c61 6573 0a3b 2020 -0000b00 2020 2020 2020 6f66 2072 7528 6e69 2074 -0000b10 3d74 3b31 7420 3d3c 756e 626d 7265 2e73 -0000b20 656c 676e 6874 203b 2b2b 2974 7b20 200a -0000b30 2020 2020 2020 2020 2020 6120 7475 206f -0000b40 6c61 6574 6e72 7461 7669 7365 3d20 6e20 -0000b50 6d75 6562 7372 305b 2e2e 5d74 6920 206e -0000b60 756e 326d 6f77 6472 3b73 200a 2020 2020 -0000b70 2020 2020 2020 6920 2066 2128 6c61 6574 -0000b80 6e72 7461 7669 7365 0a29 2020 2020 2020 -0000b90 2020 2020 2020 2020 2020 6f63 746e 6e69 -0000ba0 6575 0a3b 2020 2020 2020 2020 2020 2020 -0000bb0 6f66 6e75 7764 726f 2064 203d 7274 6575 -0000bc0 0a3b 2020 2020 2020 2020 2020 2020 6669 -0000bd0 2820 756e 626d 7265 2e73 656c 676e 6874 -0000be0 3e20 2020 2974 7b20 200a 2020 2020 2020 -0000bf0 2020 2020 2020 2020 2f20 202f 6f43 626d -0000c00 6e69 2065 6c61 206c 7563 7272 6e65 2074 -0000c10 6c61 6574 6e72 7461 7669 7365 7720 7469 -0000c20 2068 6c61 206c 6c61 6574 6e72 7461 7669 -0000c30 7365 2020 2020 0a20 2020 2020 2020 2020 -0000c40 2020 2020 2020 2020 2f2f 6f20 2066 6874 -0000c50 2065 6572 7473 2820 656e 7478 7020 6569 -0000c60 6563 6320 6e61 7320 6174 7472 7720 7469 -0000c70 2068 2061 6964 6967 2974 2020 2020 2020 -0000c80 2020 2020 2020 2020 200a 2020 2020 2020 -0000c90 2020 2020 2020 2020 6620 726f 6165 6863 -0000ca0 2820 3261 203b 465f 6e69 5764 726f 7364 -0000cb0 2028 756e 626d 7265 5b73 2e74 242e 2c5d -0000cc0 7420 7572 2065 2020 2020 2029 0a29 2020 -0000cd0 2020 2020 2020 2020 2020 2020 2020 2020 -0000ce0 2020 6f66 6572 6361 2868 3161 203b 612a -0000cf0 746c 7265 616e 6974 6576 2973 200a 2020 -0000d00 2020 2020 2020 2020 2020 2020 2020 2020 -0000d10 2020 2020 6572 2074 3d7e 6120 2031 207e -0000d20 2022 2022 207e 3261 0a3b 2020 2020 2020 -0000d30 2020 2020 2020 0a7d 2020 2020 2020 2020 -0000d40 2020 2020 6c65 6573 2020 2020 200a 2020 -0000d50 2020 2020 2020 2020 2020 2020 7220 7465 -0000d60 
7e20 203d 612a 746c 7265 616e 6974 6576 -0000d70 3b73 2020 2020 2f2f 6120 7070 6e65 2064 -0000d80 6874 7365 2065 6c61 6574 6e72 7461 7669 -0000d90 7365 200a 2020 2020 2020 7d20 200a 2020 -0000da0 2020 2020 2f20 202f 7254 2079 6f74 6b20 -0000db0 6565 2070 2031 6964 6967 2c74 6f20 6c6e -0000dc0 2079 6669 7720 2765 6572 6120 6c6c 776f -0000dd0 6465 6120 646e 6e20 206f 746f 6568 0a72 -0000de0 2020 2020 2020 2020 2f2f 6120 746c 7265 -0000df0 616e 6974 6576 2073 6577 6572 6620 756f -0000e00 646e 200a 2020 2020 2020 2f20 202f 6554 -0000e10 7473 6e69 2067 7222 7465 6c2e 6e65 7467 -0000e20 2268 6d20 6b61 7365 6d20 726f 2065 6573 -0000e30 736e 2065 6874 6e61 7420 7365 6974 676e -0000e40 2220 6f66 6e75 7764 726f 2264 0a2c 2020 -0000e50 2020 2020 2020 2f2f 6220 7475 7420 6568 -0000e60 6f20 6874 7265 6920 706d 656c 656d 746e -0000e70 7461 6f69 736e 7320 6565 206d 6f74 6420 -0000e80 206f 756a 7473 7420 6968 2e73 200a 2020 -0000e90 2020 2020 6920 2066 6428 6769 7469 6b6f -0000ea0 2620 2026 6621 756f 646e 6f77 6472 2029 -0000eb0 207b 2f2f 6572 2e74 656c 676e 6874 3d20 -0000ec0 203d 2030 0a20 2020 2020 2020 2020 2020 -0000ed0 2020 6669 6e28 6d75 6562 7372 6c2e 6e65 -0000ee0 7467 2068 203e 3120 2029 0a7b 2020 2020 -0000ef0 2020 2020 2020 2020 2020 2020 2f2f 4320 -0000f00 6d6f 6962 656e 3120 6420 6769 7469 7720 -0000f10 7469 2068 6c61 206c 6c61 6574 616e 6974 -0000f20 6576 2073 7266 6d6f 7420 6568 7220 7365 -0000f30 2074 2020 0a20 2020 2020 2020 2020 2020 -0000f40 2020 2020 2020 2f2f 2820 656e 7478 7020 -0000f50 6569 6563 6320 6e61 6e20 746f 7320 6174 -0000f60 7472 7720 7469 2068 2061 6964 6967 2974 -0000f70 2020 2020 2020 2020 2020 200a 2020 2020 -0000f80 2020 2020 2020 2020 2020 6620 726f 6165 -0000f90 6863 2820 3b61 5f20 6946 646e 6f57 6472 -0000fa0 2873 6e20 6d75 6562 7372 315b 2e2e 5d24 -0000fb0 202c 6166 736c 2065 2029 0a29 2020 2020 -0000fc0 2020 2020 2020 2020 2020 2020 2020 2020 -0000fd0 6572 2074 3d7e 6e20 6d75 6562 7372 305b -0000fe0 2e2e 5d31 7e20 2220 2220 7e20 6120 0a3b -0000ff0 2020 2020 2020 2020 2020 2020 207d 2020 -0001000 0a20 2020 2020 2020 2020 2020 2020 6c65 -0001010 6573 2020 2020 200a 2020 2020 2020 2020 -0001020 2020 2020 2020 7220 7465 7e20 203d 756e -0001030 626d 7265 5b73 2e30 312e 3b5d 2020 2020 -0001040 2f2f 6a20 7375 2074 7061 6570 646e 7420 -0001050 6968 2073 6964 6967 2074 2020 2020 2020 -0001060 2020 2020 2020 200a 2020 2020 2020 7d20 -0001070 2020 2020 200a 2020 2020 2020 7220 7465 -0001080 7275 206e 6572 3b74 200a 2020 7d20 0a0a -0001090 2020 2020 2f2f 202f 5428 6968 2073 7566 -00010a0 636e 6974 6e6f 7720 7361 6920 6c6e 6e69 -00010b0 6465 6920 206e 6874 2065 726f 6769 6e69 -00010c0 6c61 7020 6f72 7267 6d61 2029 200a 2020 -00010d0 2f20 2f2f 4620 6e69 7364 6120 6c6c 6120 -00010e0 746c 7265 616e 6974 6576 2073 6f66 2072 -00010f0 6874 2065 6967 6576 206e 6870 6e6f 2065 -0001100 756e 626d 7265 0a20 2020 2020 2f2f 202f -0001110 6552 7574 6e72 3a73 6120 7272 7961 6f20 -0001120 2066 7473 6972 676e 2073 200a 2020 7320 -0001130 7274 6e69 6167 7272 7961 4620 6e69 5764 -0001140 726f 7364 2028 7473 6972 676e 7020 6f68 -0001150 656e 6e5f 6d75 6562 2072 0a29 2020 2020 -0001160 0a7b 2020 2020 2020 2020 6669 2820 7021 -0001170 6f68 656e 6e5f 6d75 6562 2e72 656c 676e -0001180 6874 0a29 2020 2020 2020 2020 2020 2020 -0001190 6572 7574 6e72 6e20 6c75 3b6c 200a 2020 -00011a0 2020 2020 2f20 202f 7453 6972 2070 6874 -00011b0 2065 6f6e 2d6e 6964 6967 2074 6863 7261 -00011c0 6361 6574 7372 6620 6f72 206d 6874 2065 -00011d0 6870 6e6f 2065 756e 626d 7265 202c 6e61 -00011e0 0a64 2020 2020 2020 2020 
2f2f 7020 7361 -00011f0 2073 7469 7420 206f 6874 2065 6572 7563 -0001200 7372 7669 2065 7566 636e 6974 6e6f 2820 -0001210 656c 6461 6e69 2067 6964 6967 2074 7369 -0001220 6120 6c6c 776f 6465 0a29 2020 2020 2020 -0001230 2020 6572 7574 6e72 5f20 6946 646e 6f57 -0001240 6472 2873 7320 7274 7069 6f4e 446e 6769 -0001250 7469 7028 6f68 656e 6e5f 6d75 6562 2972 -0001260 202c 7274 6575 2920 203b 2020 0a20 2020 -0001270 2020 207d 2020 0a20 2020 2020 200a 2020 -0001280 2f20 202f 6552 6461 7420 6568 7020 6f68 -0001290 656e 6e20 6d75 6562 7372 2020 2020 0a20 -00012a0 2020 2020 6f66 6572 6361 2868 7473 6972 -00012b0 676e 7020 6f68 656e 203b 656e 2077 7542 -00012c0 6666 7265 6465 6946 656c 2228 6e69 7570 -00012d0 2e74 7874 2274 2020 2920 2920 200a 2020 -00012e0 2020 2020 6620 726f 6165 6863 6128 746c -00012f0 7265 616e 6974 6576 203b 6946 646e 6f57 -0001300 6472 2873 7020 6f68 656e 2920 2920 200a -0001310 2020 2020 2020 2020 2020 7720 6972 6574 -0001320 6c66 286e 6870 6e6f 2c65 2220 203a 2c22 -0001330 6120 746c 7265 616e 6974 6576 2920 0a3b -0001340 0a7d 000a -0001343 diff --git a/tests/examplefiles/hexdump_od b/tests/examplefiles/hexdump_od deleted file mode 100644 index a407aef0..00000000 --- a/tests/examplefiles/hexdump_od +++ /dev/null @@ -1,310 +0,0 @@ -0000000 2f 2f 20 43 72 65 61 74 65 64 20 62 79 20 4c 69 >// Created by Li< -0000020 6f 6e 65 6c 6c 6f 20 4c 75 6e 65 73 75 20 61 6e >onello Lunesu an< -0000040 64 20 70 6c 61 63 65 64 20 69 6e 20 74 68 65 20 >d placed in the < -0000060 70 75 62 6c 69 63 20 64 6f 6d 61 69 6e 2e 0a 2f >public domain../< -0000100 2f 20 54 68 69 73 20 66 69 6c 65 20 68 61 73 20 >/ This file has < -0000120 62 65 65 6e 20 6d 6f 64 69 66 69 65 64 20 66 72 >been modified fr< -0000140 6f 6d 20 69 74 73 20 6f 72 69 67 69 6e 61 6c 20 >om its original < -0000160 76 65 72 73 69 6f 6e 2e 0a 2f 2f 20 49 74 20 68 >version..// It h< -0000200 61 73 20 62 65 65 6e 20 66 6f 72 6d 61 74 74 65 >as been formatte< -0000220 64 20 74 6f 20 66 69 74 20 79 6f 75 72 20 73 63 >d to fit your sc< -0000240 72 65 65 6e 2e 0a 6d 6f 64 75 6c 65 20 70 68 6f >reen..module pho< -0000260 6e 65 6e 6f 3b 20 20 20 20 20 2f 2f 20 6f 70 74 >neno; // opt< -0000300 69 6f 6e 61 6c 0a 69 6d 70 6f 72 74 20 73 74 64 >ional.import std< -0000320 2e 73 74 64 69 6f 3b 20 20 20 2f 2f 20 77 72 69 >.stdio; // wri< -0000340 74 65 66 6c 6e 20 20 20 20 20 0a 69 6d 70 6f 72 >tefln .impor< -0000360 74 20 73 74 64 2e 63 74 79 70 65 3b 20 20 20 2f >t std.ctype; /< -0000400 2f 20 69 73 64 69 67 69 74 20 20 20 20 20 0a 69 >/ isdigit .i< -0000420 6d 70 6f 72 74 20 73 74 64 2e 73 74 72 65 61 6d >mport std.stream< -0000440 3b 20 20 2f 2f 20 42 75 66 66 65 72 65 64 46 69 >; // BufferedFi< -0000460 6c 65 0a 0a 2f 2f 20 4a 75 73 74 20 66 6f 72 20 >le..// Just for < -0000500 72 65 61 64 61 62 69 6c 69 74 79 20 28 69 6d 61 >readability (ima< -0000520 67 69 6e 65 20 63 68 61 72 5b 5d 5b 5d 5b 63 68 >gine char[][][ch< -0000540 61 72 5b 5d 5d 29 20 20 20 20 0a 61 6c 69 61 73 >ar[]]) .alias< -0000560 20 63 68 61 72 5b 5d 20 73 74 72 69 6e 67 3b 0a > char[] string;.< -0000600 61 6c 69 61 73 20 73 74 72 69 6e 67 5b 5d 20 73 >alias string[] s< -0000620 74 72 69 6e 67 61 72 72 61 79 3b 0a 0a 2f 2f 2f >tringarray;..///< -0000640 20 53 74 72 69 70 73 20 6e 6f 6e 2d 64 69 67 69 > Strips non-digi< -0000660 74 20 63 68 61 72 61 63 74 65 72 73 20 66 72 6f >t characters fro< -0000700 6d 20 74 68 65 20 73 74 72 69 6e 67 20 28 43 4f >m the string (CO< -0000720 57 29 0a 73 74 72 69 6e 67 20 73 74 72 69 70 4e >W).string stripN< -0000740 6f 6e 44 69 67 69 74 
28 20 69 6e 20 73 74 72 69 >onDigit( in stri< -0000760 6e 67 20 6c 69 6e 65 20 29 20 0a 7b 0a 20 20 20 >ng line ) .{. < -0001000 20 73 74 72 69 6e 67 20 72 65 74 3b 0a 20 20 20 > string ret;. < -0001020 20 66 6f 72 65 61 63 68 28 75 69 6e 74 20 69 2c > foreach(uint i,< -0001040 20 63 3b 20 6c 69 6e 65 29 20 7b 0a 20 20 20 20 > c; line) {. < -0001060 20 20 20 20 2f 2f 20 45 72 72 6f 72 3a 20 73 74 > // Error: st< -0001100 64 2e 63 74 79 70 65 2e 69 73 64 69 67 69 74 20 >d.ctype.isdigit < -0001120 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 5c 70 68 >at C:\dmd\src\ph< -0001140 6f 62 6f 73 5c 73 74 64 5c 63 74 79 70 65 2e 64 >obos\std\ctype.d< -0001160 28 33 37 29 20 0a 20 20 20 20 20 20 20 20 2f 2f >(37) . //< -0001200 20 63 6f 6e 66 6c 69 63 74 73 20 77 69 74 68 20 > conflicts with < -0001220 73 74 64 2e 73 74 72 65 61 6d 2e 69 73 64 69 67 >std.stream.isdig< -0001240 69 74 20 61 74 20 43 3a 5c 64 6d 64 5c 73 72 63 >it at C:\dmd\src< -0001260 5c 70 68 6f 62 6f 73 5c 73 74 64 5c 73 74 72 65 >\phobos\std\stre< -0001300 61 6d 2e 64 28 32 39 32 34 29 0a 20 20 20 20 20 >am.d(2924). < -0001320 20 20 20 69 66 20 28 21 73 74 64 2e 63 74 79 70 > if (!std.ctyp< -0001340 65 2e 69 73 64 69 67 69 74 28 63 29 29 20 7b 0a >e.isdigit(c)) {.< -0001360 20 20 20 20 20 20 20 20 20 20 20 20 69 66 20 28 > if (< -0001400 21 72 65 74 29 0a 20 20 20 20 20 20 20 20 20 20 >!ret). < -0001420 20 20 20 20 20 20 72 65 74 20 3d 20 6c 69 6e 65 > ret = line< -0001440 5b 30 2e 2e 69 5d 3b 20 20 20 20 0a 20 20 20 20 >[0..i]; . < -0001460 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 20 20 > } . < -0001500 20 20 65 6c 73 65 20 69 66 20 28 72 65 74 29 0a > else if (ret).< -0001520 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 20 > ret < -0001540 7e 3d 20 63 3b 20 20 20 20 0a 20 20 20 20 7d 20 >~= c; . } < -0001560 20 20 20 0a 20 20 20 20 72 65 74 75 72 6e 20 72 > . return r< -0001600 65 74 3f 72 65 74 3a 6c 69 6e 65 3b 0a 7d 0a 0a >et?ret:line;.}..< -0001620 75 6e 69 74 74 65 73 74 20 7b 0a 20 20 20 20 61 >unittest {. a< -0001640 73 73 65 72 74 28 20 73 74 72 69 70 4e 6f 6e 44 >ssert( stripNonD< -0001660 69 67 69 74 28 22 61 73 64 66 22 29 20 3d 3d 20 >igit("asdf") == < -0001700 22 22 20 20 29 3b 0a 20 20 20 20 61 73 73 65 72 >"" );. asser< -0001720 74 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 69 74 >t( stripNonDigit< -0001740 28 22 5c 27 31 33 2d 3d 32 20 34 6b 6f 70 22 29 >("\'13-=2 4kop")< -0001760 20 3d 3d 20 20 22 31 33 32 34 22 20 20 29 3b 0a > == "1324" );.< -0002000 7d 0a 0a 2f 2f 2f 20 43 6f 6e 76 65 72 74 73 20 >}../// Converts < -0002020 61 20 77 6f 72 64 20 69 6e 74 6f 20 61 20 6e 75 >a word into a nu< -0002040 6d 62 65 72 2c 20 69 67 6e 6f 72 69 6e 67 20 61 >mber, ignoring a< -0002060 6c 6c 20 6e 6f 6e 20 61 6c 70 68 61 20 63 68 61 >ll non alpha cha< -0002100 72 61 63 74 65 72 73 20 20 0a 73 74 72 69 6e 67 >racters .string< -0002120 20 77 6f 72 64 54 6f 4e 75 6d 28 20 69 6e 20 73 > wordToNum( in s< -0002140 74 72 69 6e 67 20 77 6f 72 64 20 29 0a 7b 0a 2f >tring word ).{./< -0002160 2f 20 74 72 61 6e 73 6c 61 74 69 6f 6e 20 74 61 >/ translation ta< -0002200 62 6c 65 20 66 6f 72 20 74 68 65 20 74 61 73 6b >ble for the task< -0002220 20 61 74 20 68 61 6e 64 0a 63 6f 6e 73 74 20 63 > at hand.const c< -0002240 68 61 72 5b 32 35 36 5d 20 54 52 41 4e 53 4c 41 >har[256] TRANSLA< -0002260 54 45 20 3d 20 20 20 20 0a 20 20 20 20 22 20 20 >TE = . " < -0002300 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0002320 20 20 20 20 20 20 20 20 20 20 20 20 20 20 22 20 > " < -0002340 20 2f 2f 20 30 20 20 20 0a 20 20 20 20 22 20 20 > // 0 . 
" < -0002360 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 31 > 01< -0002400 32 33 34 35 36 37 38 39 20 20 20 20 20 20 22 20 >23456789 " < -0002420 20 2f 2f 20 33 32 20 20 20 20 20 0a 20 20 20 20 > // 32 . < -0002440 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 31 >" 57630499617851< -0002460 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 20 >881234762239 < -0002500 20 22 20 20 2f 2f 20 36 34 20 20 20 0a 20 20 20 > " // 64 . < -0002520 20 22 20 35 37 36 33 30 34 39 39 36 31 37 38 35 > " 5763049961785< -0002540 31 38 38 31 32 33 34 37 36 32 32 33 39 20 20 20 >1881234762239 < -0002560 20 20 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 > ". " < -0002600 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0002620 20 20 20 20 20 20 20 20 20 22 0a 20 20 20 20 22 > ". "< -0002640 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -* -0002700 22 0a 20 20 20 20 22 20 20 20 20 20 20 20 20 20 >". " < -0002720 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0002740 20 20 20 20 20 20 20 22 20 20 20 20 0a 20 20 20 > " . < -0002760 20 22 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > " < -0003000 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0003020 20 20 22 3b 0a 20 20 20 20 73 74 72 69 6e 67 20 > ";. string < -0003040 72 65 74 3b 0a 20 20 20 20 66 6f 72 65 61 63 68 >ret;. foreach< -0003060 28 63 3b 20 63 61 73 74 28 75 62 79 74 65 5b 5d >(c; cast(ubyte[]< -0003100 29 77 6f 72 64 29 0a 20 20 20 20 20 20 20 20 69 >)word). i< -0003120 66 20 28 54 52 41 4e 53 4c 41 54 45 5b 63 5d 20 >f (TRANSLATE[c] < -0003140 21 3d 20 27 20 27 29 0a 20 20 20 20 20 20 20 20 >!= ' '). < -0003160 20 20 20 20 72 65 74 20 7e 3d 20 54 52 41 4e 53 > ret ~= TRANS< -0003200 4c 41 54 45 5b 63 5d 3b 0a 20 20 20 20 72 65 74 >LATE[c];. ret< -0003220 75 72 6e 20 72 65 74 3b 0a 7d 0a 0a 75 6e 69 74 >urn ret;.}..unit< -0003240 74 65 73 74 20 7b 0a 20 2f 2f 20 54 65 73 74 20 >test {. // Test < -0003260 77 6f 72 64 54 6f 4e 75 6d 20 75 73 69 6e 67 20 >wordToNum using < -0003300 74 68 65 20 74 61 62 6c 65 20 66 72 6f 6d 20 74 >the table from t< -0003320 68 65 20 74 61 73 6b 20 64 65 73 63 72 69 70 74 >he task descript< -0003340 69 6f 6e 2e 0a 20 61 73 73 65 72 74 28 20 22 30 >ion.. assert( "0< -0003360 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 36 >1112223334455666< -0003400 37 37 37 38 38 38 39 39 39 22 20 3d 3d 0a 20 20 >777888999" ==. < -0003420 20 77 6f 72 64 54 6f 4e 75 6d 28 22 45 20 7c 20 > wordToNum("E | < -0003440 4a 20 4e 20 51 20 7c 20 52 20 57 20 58 20 7c 20 >J N Q | R W X | < -0003460 44 20 53 20 59 20 7c 20 46 20 54 20 7c 20 41 20 >D S Y | F T | A < -0003500 4d 20 7c 20 43 20 49 20 56 20 7c 20 42 20 4b 20 >M | C I V | B K < -0003520 55 20 7c 20 4c 20 4f 20 50 20 7c 20 47 20 48 20 >U | L O P | G H < -0003540 5a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 20 22 >Z"));. assert( "< -0003560 30 31 31 31 32 32 32 33 33 33 34 34 35 35 36 36 >0111222333445566< -0003600 36 37 37 37 38 38 38 39 39 39 22 20 3d 3d 20 0a >6777888999" == .< -0003620 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 65 20 > wordToNum("e < -0003640 7c 20 6a 20 6e 20 71 20 7c 20 72 20 77 20 78 20 >| j n q | r w x < -0003660 7c 20 64 20 73 20 79 20 7c 20 66 20 74 20 7c 20 >| d s y | f t | < -0003700 61 20 6d 20 7c 20 63 20 69 20 76 20 7c 20 62 20 >a m | c i v | b < -0003720 6b 20 75 20 7c 20 6c 20 6f 20 70 20 7c 20 67 20 >k u | l o p | g < -0003740 68 20 7a 22 29 29 3b 0a 20 61 73 73 65 72 74 28 >h z"));. assert(< -0003760 20 22 30 31 32 33 34 35 36 37 38 39 22 20 3d 3d > "0123456789" ==< -0004000 20 0a 20 20 20 77 6f 72 64 54 6f 4e 75 6d 28 22 > . 
wordToNum("< -0004020 30 20 7c 20 20 20 31 20 20 20 7c 20 20 20 32 20 >0 | 1 | 2 < -0004040 20 20 7c 20 20 20 33 20 20 20 7c 20 20 34 20 20 > | 3 | 4 < -0004060 7c 20 20 35 20 20 7c 20 20 20 36 20 20 20 7c 20 >| 5 | 6 | < -0004100 20 20 37 20 20 20 7c 20 20 20 38 20 20 20 7c 20 > 7 | 8 | < -0004120 20 20 39 22 29 29 3b 0a 7d 0a 0a 76 6f 69 64 20 > 9"));.}..void < -0004140 6d 61 69 6e 28 20 73 74 72 69 6e 67 5b 5d 20 61 >main( string[] a< -0004160 72 67 73 20 29 0a 7b 0a 20 20 20 20 2f 2f 20 54 >rgs ).{. // T< -0004200 68 69 73 20 61 73 73 6f 63 69 61 74 69 76 65 20 >his associative < -0004220 61 72 72 61 79 20 6d 61 70 73 20 61 20 6e 75 6d >array maps a num< -0004240 62 65 72 20 74 6f 20 61 6e 20 61 72 72 61 79 20 >ber to an array < -0004260 6f 66 20 77 6f 72 64 73 2e 20 20 20 20 0a 20 20 >of words. . < -0004300 20 20 73 74 72 69 6e 67 61 72 72 61 79 5b 73 74 > stringarray[st< -0004320 72 69 6e 67 5d 20 20 20 20 6e 75 6d 32 77 6f 72 >ring] num2wor< -0004340 64 73 3b 0a 0a 20 20 20 20 66 6f 72 65 61 63 68 >ds;.. foreach< -0004360 28 73 74 72 69 6e 67 20 77 6f 72 64 3b 20 6e 65 >(string word; ne< -0004400 77 20 42 75 66 66 65 72 65 64 46 69 6c 65 28 22 >w BufferedFile("< -0004420 64 69 63 74 69 6f 6e 61 72 79 2e 74 78 74 22 20 >dictionary.txt" < -0004440 29 20 29 0a 20 20 20 20 20 20 20 20 6e 75 6d 32 >) ). num2< -0004460 77 6f 72 64 73 5b 20 77 6f 72 64 54 6f 4e 75 6d >words[ wordToNum< -0004500 28 77 6f 72 64 29 20 5d 20 7e 3d 20 77 6f 72 64 >(word) ] ~= word< -0004520 2e 64 75 70 3b 20 20 20 20 20 20 20 20 2f 2f 20 >.dup; // < -0004540 6d 75 73 74 20 64 75 70 0a 0a 20 20 20 20 2f 2f >must dup.. //< -0004560 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 6c 74 65 >/ Finds all alte< -0004600 72 6e 61 74 69 76 65 73 20 66 6f 72 20 74 68 65 >rnatives for the< -0004620 20 67 69 76 65 6e 20 6e 75 6d 62 65 72 0a 20 20 > given number. < -0004640 20 20 2f 2f 2f 20 28 73 68 6f 75 6c 64 20 68 61 > /// (should ha< -0004660 76 65 20 62 65 65 6e 20 73 74 72 69 70 70 65 64 >ve been stripped< -0004700 20 66 72 6f 6d 20 6e 6f 6e 2d 64 69 67 69 74 20 > from non-digit < -0004720 63 68 61 72 61 63 74 65 72 73 29 0a 20 20 20 20 >characters). < -0004740 73 74 72 69 6e 67 61 72 72 61 79 20 5f 46 69 6e >stringarray _Fin< -0004760 64 57 6f 72 64 73 28 20 73 74 72 69 6e 67 20 6e >dWords( string n< -0005000 75 6d 62 65 72 73 2c 20 62 6f 6f 6c 20 64 69 67 >umbers, bool dig< -0005020 69 74 6f 6b 20 29 0a 20 20 20 20 69 6e 20 7b 0a >itok ). in {.< -0005040 20 20 20 20 20 20 20 20 61 73 73 65 72 74 28 6e > assert(n< -0005060 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 20 3e 20 >umbers.length > < -0005100 20 30 29 3b 20 20 20 20 0a 20 20 20 20 7d 20 20 > 0); . } < -0005120 20 20 0a 20 20 20 20 6f 75 74 28 72 65 73 75 6c > . out(resul< -0005140 74 29 20 7b 0a 20 20 20 20 20 20 20 20 66 6f 72 >t) {. for< -0005160 65 61 63 68 20 28 61 3b 20 72 65 73 75 6c 74 29 >each (a; result)< -0005200 0a 20 20 20 20 20 20 20 20 20 20 20 20 61 73 73 >. ass< -0005220 65 72 74 28 20 77 6f 72 64 54 6f 4e 75 6d 28 61 >ert( wordToNum(a< -0005240 29 20 3d 3d 20 6e 75 6d 62 65 72 73 20 29 3b 0a >) == numbers );.< -0005260 20 20 20 20 7d 20 20 20 20 0a 20 20 20 20 62 6f > } . bo< -0005300 64 79 20 7b 0a 20 20 20 20 20 20 20 20 73 74 72 >dy {. str< -0005320 69 6e 67 61 72 72 61 79 20 72 65 74 3b 0a 20 20 >ingarray ret;. < -0005340 20 20 20 20 20 20 62 6f 6f 6c 20 66 6f 75 6e 64 > bool found< -0005360 77 6f 72 64 20 3d 20 66 61 6c 73 65 3b 0a 20 20 >word = false;. 
< -0005400 20 20 20 20 20 20 66 6f 72 20 28 75 69 6e 74 20 > for (uint < -0005420 74 3d 31 3b 20 74 3c 3d 6e 75 6d 62 65 72 73 2e >t=1; t<=numbers.< -0005440 6c 65 6e 67 74 68 3b 20 2b 2b 74 29 20 7b 0a 20 >length; ++t) {. < -0005460 20 20 20 20 20 20 20 20 20 20 20 61 75 74 6f 20 > auto < -0005500 61 6c 74 65 72 6e 61 74 69 76 65 73 20 3d 20 6e >alternatives = n< -0005520 75 6d 62 65 72 73 5b 30 2e 2e 74 5d 20 69 6e 20 >umbers[0..t] in < -0005540 6e 75 6d 32 77 6f 72 64 73 3b 0a 20 20 20 20 20 >num2words;. < -0005560 20 20 20 20 20 20 20 69 66 20 28 21 61 6c 74 65 > if (!alte< -0005600 72 6e 61 74 69 76 65 73 29 0a 20 20 20 20 20 20 >rnatives). < -0005620 20 20 20 20 20 20 20 20 20 20 63 6f 6e 74 69 6e > contin< -0005640 75 65 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 >ue;. < -0005660 66 6f 75 6e 64 77 6f 72 64 20 3d 20 74 72 75 65 >foundword = true< -0005700 3b 0a 20 20 20 20 20 20 20 20 20 20 20 20 69 66 >;. if< -0005720 20 28 6e 75 6d 62 65 72 73 2e 6c 65 6e 67 74 68 > (numbers.length< -0005740 20 3e 20 20 74 29 20 7b 0a 20 20 20 20 20 20 20 > > t) {. < -0005760 20 20 20 20 20 20 20 20 20 2f 2f 20 43 6f 6d 62 > // Comb< -0006000 69 6e 65 20 61 6c 6c 20 63 75 72 72 65 6e 74 20 >ine all current < -0006020 61 6c 74 65 72 6e 61 74 69 76 65 73 20 77 69 74 >alternatives wit< -0006040 68 20 61 6c 6c 20 61 6c 74 65 72 6e 61 74 69 76 >h all alternativ< -0006060 65 73 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 >es . < -0006100 20 20 20 20 20 20 20 20 2f 2f 20 6f 66 20 74 68 > // of th< -0006120 65 20 72 65 73 74 20 28 6e 65 78 74 20 70 69 65 >e rest (next pie< -0006140 63 65 20 63 61 6e 20 73 74 61 72 74 20 77 69 74 >ce can start wit< -0006160 68 20 61 20 64 69 67 69 74 29 20 20 20 20 20 20 >h a digit) < -0006200 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 > . < -0006220 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 63 68 > foreach< -0006240 20 28 61 32 3b 20 5f 46 69 6e 64 57 6f 72 64 73 > (a2; _FindWords< -0006260 28 20 6e 75 6d 62 65 72 73 5b 74 2e 2e 24 5d 2c >( numbers[t..$],< -0006300 20 74 72 75 65 20 20 20 20 20 29 20 29 0a 20 20 > true ) ). < -0006320 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0006340 20 20 66 6f 72 65 61 63 68 28 61 31 3b 20 2a 61 > foreach(a1; *a< -0006360 6c 74 65 72 6e 61 74 69 76 65 73 29 0a 20 20 20 >lternatives). < -0006400 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0006420 20 20 20 20 72 65 74 20 7e 3d 20 61 31 20 7e 20 > ret ~= a1 ~ < -0006440 22 20 22 20 7e 20 61 32 3b 0a 20 20 20 20 20 20 >" " ~ a2;. < -0006460 20 20 20 20 20 20 7d 0a 20 20 20 20 20 20 20 20 > }. < -0006500 20 20 20 20 65 6c 73 65 20 20 20 20 0a 20 20 20 > else . < -0006520 20 20 20 20 20 20 20 20 20 20 20 20 20 72 65 74 > ret< -0006540 20 7e 3d 20 2a 61 6c 74 65 72 6e 61 74 69 76 65 > ~= *alternative< -0006560 73 3b 20 20 20 20 2f 2f 20 61 70 70 65 6e 64 20 >s; // append < -0006600 74 68 65 73 65 20 61 6c 74 65 72 6e 61 74 69 76 >these alternativ< -0006620 65 73 0a 20 20 20 20 20 20 20 20 7d 0a 20 20 20 >es. }. < -0006640 20 20 20 20 20 2f 2f 20 54 72 79 20 74 6f 20 6b > // Try to k< -0006660 65 65 70 20 31 20 64 69 67 69 74 2c 20 6f 6e 6c >eep 1 digit, onl< -0006700 79 20 69 66 20 77 65 27 72 65 20 61 6c 6c 6f 77 >y if we're allow< -0006720 65 64 20 61 6e 64 20 6e 6f 20 6f 74 68 65 72 0a >ed and no other.< -0006740 20 20 20 20 20 20 20 20 2f 2f 20 61 6c 74 65 72 > // alter< -0006760 6e 61 74 69 76 65 73 20 77 65 72 65 20 66 6f 75 >natives were fou< -0007000 6e 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 54 65 >nd. 
// Te< -0007020 73 74 69 6e 67 20 22 72 65 74 2e 6c 65 6e 67 74 >sting "ret.lengt< -0007040 68 22 20 6d 61 6b 65 73 20 6d 6f 72 65 20 73 65 >h" makes more se< -0007060 6e 73 65 20 74 68 61 6e 20 74 65 73 74 69 6e 67 >nse than testing< -0007100 20 22 66 6f 75 6e 64 77 6f 72 64 22 2c 0a 20 20 > "foundword",. < -0007120 20 20 20 20 20 20 2f 2f 20 62 75 74 20 74 68 65 > // but the< -0007140 20 6f 74 68 65 72 20 69 6d 70 6c 65 6d 65 6e 74 > other implement< -0007160 61 74 69 6f 6e 73 20 73 65 65 6d 20 74 6f 20 64 >ations seem to d< -0007200 6f 20 6a 75 73 74 20 74 68 69 73 2e 0a 20 20 20 >o just this.. < -0007220 20 20 20 20 20 69 66 20 28 64 69 67 69 74 6f 6b > if (digitok< -0007240 20 26 26 20 21 66 6f 75 6e 64 77 6f 72 64 29 20 > && !foundword) < -0007260 7b 20 2f 2f 72 65 74 2e 6c 65 6e 67 74 68 20 3d >{ //ret.length =< -0007300 3d 20 30 20 20 0a 20 20 20 20 20 20 20 20 20 20 >= 0 . < -0007320 20 20 69 66 28 6e 75 6d 62 65 72 73 2e 6c 65 6e > if(numbers.len< -0007340 67 74 68 20 3e 20 20 31 29 20 7b 0a 20 20 20 20 >gth > 1) {. < -0007360 20 20 20 20 20 20 20 20 20 20 20 20 2f 2f 20 43 > // C< -0007400 6f 6d 62 69 6e 65 20 31 20 64 69 67 69 74 20 77 >ombine 1 digit w< -0007420 69 74 68 20 61 6c 6c 20 61 6c 74 65 6e 61 74 69 >ith all altenati< -0007440 76 65 73 20 66 72 6f 6d 20 74 68 65 20 72 65 73 >ves from the res< -0007460 74 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 20 >t . < -0007500 20 20 20 20 20 20 2f 2f 20 28 6e 65 78 74 20 70 > // (next p< -0007520 69 65 63 65 20 63 61 6e 20 6e 6f 74 20 73 74 61 >iece can not sta< -0007540 72 74 20 77 69 74 68 20 61 20 64 69 67 69 74 29 >rt with a digit)< -0007560 20 20 20 20 20 20 20 20 20 20 0a 20 20 20 20 20 > . < -0007600 20 20 20 20 20 20 20 20 20 20 20 66 6f 72 65 61 > forea< -0007620 63 68 20 28 61 3b 20 5f 46 69 6e 64 57 6f 72 64 >ch (a; _FindWord< -0007640 73 28 20 6e 75 6d 62 65 72 73 5b 31 2e 2e 24 5d >s( numbers[1..$]< -0007660 2c 20 66 61 6c 73 65 20 29 20 29 0a 20 20 20 20 >, false ) ). < -0007700 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 > < -0007720 72 65 74 20 7e 3d 20 6e 75 6d 62 65 72 73 5b 30 >ret ~= numbers[0< -0007740 2e 2e 31 5d 20 7e 20 22 20 22 20 7e 20 61 3b 0a >..1] ~ " " ~ a;.< -0007760 20 20 20 20 20 20 20 20 20 20 20 20 7d 20 20 20 > } < -0010000 20 0a 20 20 20 20 20 20 20 20 20 20 20 20 65 6c > . el< -0010020 73 65 20 20 20 20 0a 20 20 20 20 20 20 20 20 20 >se . < -0010040 20 20 20 20 20 20 20 72 65 74 20 7e 3d 20 6e 75 > ret ~= nu< -0010060 6d 62 65 72 73 5b 30 2e 2e 31 5d 3b 20 20 20 20 >mbers[0..1]; < -0010100 2f 2f 20 6a 75 73 74 20 61 70 70 65 6e 64 20 74 >// just append t< -0010120 68 69 73 20 64 69 67 69 74 20 20 20 20 20 20 20 >his digit < -0010140 20 20 20 20 20 20 0a 20 20 20 20 20 20 20 20 7d > . }< -0010160 20 20 20 20 0a 20 20 20 20 20 20 20 20 72 65 74 > . ret< -0010200 75 72 6e 20 72 65 74 3b 0a 20 20 20 20 7d 0a 0a >urn ret;. }..< -0010220 20 20 20 20 2f 2f 2f 20 28 54 68 69 73 20 66 75 > /// (This fu< -0010240 6e 63 74 69 6f 6e 20 77 61 73 20 69 6e 6c 69 6e >nction was inlin< -0010260 65 64 20 69 6e 20 74 68 65 20 6f 72 69 67 69 6e >ed in the origin< -0010300 61 6c 20 70 72 6f 67 72 61 6d 29 20 0a 20 20 20 >al program) . < -0010320 20 2f 2f 2f 20 46 69 6e 64 73 20 61 6c 6c 20 61 > /// Finds all a< -0010340 6c 74 65 72 6e 61 74 69 76 65 73 20 66 6f 72 20 >lternatives for < -0010360 74 68 65 20 67 69 76 65 6e 20 70 68 6f 6e 65 20 >the given phone < -0010400 6e 75 6d 62 65 72 20 0a 20 20 20 20 2f 2f 2f 20 >number . 
/// < -0010420 52 65 74 75 72 6e 73 3a 20 61 72 72 61 79 20 6f >Returns: array o< -0010440 66 20 73 74 72 69 6e 67 73 20 0a 20 20 20 20 73 >f strings . s< -0010460 74 72 69 6e 67 61 72 72 61 79 20 46 69 6e 64 57 >tringarray FindW< -0010500 6f 72 64 73 28 20 73 74 72 69 6e 67 20 70 68 6f >ords( string pho< -0010520 6e 65 5f 6e 75 6d 62 65 72 20 29 0a 20 20 20 20 >ne_number ). < -0010540 7b 0a 20 20 20 20 20 20 20 20 69 66 20 28 21 70 >{. if (!p< -0010560 68 6f 6e 65 5f 6e 75 6d 62 65 72 2e 6c 65 6e 67 >hone_number.leng< -0010600 74 68 29 0a 20 20 20 20 20 20 20 20 20 20 20 20 >th). < -0010620 72 65 74 75 72 6e 20 6e 75 6c 6c 3b 0a 20 20 20 >return null;. < -0010640 20 20 20 20 20 2f 2f 20 53 74 72 69 70 20 74 68 > // Strip th< -0010660 65 20 6e 6f 6e 2d 64 69 67 69 74 20 63 68 61 72 >e non-digit char< -0010700 61 63 74 65 72 73 20 66 72 6f 6d 20 74 68 65 20 >acters from the < -0010720 70 68 6f 6e 65 20 6e 75 6d 62 65 72 2c 20 61 6e >phone number, an< -0010740 64 0a 20 20 20 20 20 20 20 20 2f 2f 20 70 61 73 >d. // pas< -0010760 73 20 69 74 20 74 6f 20 74 68 65 20 72 65 63 75 >s it to the recu< -0011000 72 73 69 76 65 20 66 75 6e 63 74 69 6f 6e 20 28 >rsive function (< -0011020 6c 65 61 64 69 6e 67 20 64 69 67 69 74 20 69 73 >leading digit is< -0011040 20 61 6c 6c 6f 77 65 64 29 0a 20 20 20 20 20 20 > allowed). < -0011060 20 20 72 65 74 75 72 6e 20 5f 46 69 6e 64 57 6f > return _FindWo< -0011100 72 64 73 28 20 73 74 72 69 70 4e 6f 6e 44 69 67 >rds( stripNonDig< -0011120 69 74 28 70 68 6f 6e 65 5f 6e 75 6d 62 65 72 29 >it(phone_number)< -0011140 2c 20 74 72 75 65 20 29 3b 20 20 20 20 0a 20 20 >, true ); . < -0011160 20 20 7d 20 20 20 20 0a 20 20 20 20 0a 20 20 20 > } . . < -0011200 20 2f 2f 20 52 65 61 64 20 74 68 65 20 70 68 6f > // Read the pho< -0011220 6e 65 20 6e 75 6d 62 65 72 73 20 20 20 20 20 0a >ne numbers .< -0011240 20 20 20 20 66 6f 72 65 61 63 68 28 73 74 72 69 > foreach(stri< -0011260 6e 67 20 70 68 6f 6e 65 3b 20 6e 65 77 20 42 75 >ng phone; new Bu< -0011300 66 66 65 72 65 64 46 69 6c 65 28 22 69 6e 70 75 >fferedFile("inpu< -0011320 74 2e 74 78 74 22 20 20 20 29 20 29 0a 20 20 20 >t.txt" ) ). < -0011340 20 20 20 20 20 66 6f 72 65 61 63 68 28 61 6c 74 > foreach(alt< -0011360 65 72 6e 61 74 69 76 65 3b 20 46 69 6e 64 57 6f >ernative; FindWo< -0011400 72 64 73 28 20 70 68 6f 6e 65 20 29 20 29 0a 20 >rds( phone ) ). 
< -0011420 20 20 20 20 20 20 20 20 20 20 20 77 72 69 74 65 > write< -0011440 66 6c 6e 28 70 68 6f 6e 65 2c 20 22 3a 20 22 2c >fln(phone, ": ",< -0011460 20 61 6c 74 65 72 6e 61 74 69 76 65 20 29 3b 0a > alternative );.< -0011500 7d 0a 0a >}..< -0011503 diff --git a/tests/examplefiles/hexdump_xxd b/tests/examplefiles/hexdump_xxd deleted file mode 100644 index 33a8a6e1..00000000 --- a/tests/examplefiles/hexdump_xxd +++ /dev/null @@ -1,309 +0,0 @@ -0000000: 2f2f 2043 7265 6174 6564 2062 7920 4c69 // Created by Li -0000010: 6f6e 656c 6c6f 204c 756e 6573 7520 616e onello Lunesu an -0000020: 6420 706c 6163 6564 2069 6e20 7468 6520 d placed in the -0000030: 7075 626c 6963 2064 6f6d 6169 6e2e 0a2f public domain../ -0000040: 2f20 5468 6973 2066 696c 6520 6861 7320 / This file has -0000050: 6265 656e 206d 6f64 6966 6965 6420 6672 been modified fr -0000060: 6f6d 2069 7473 206f 7269 6769 6e61 6c20 om its original -0000070: 7665 7273 696f 6e2e 0a2f 2f20 4974 2068 version..// It h -0000080: 6173 2062 6565 6e20 666f 726d 6174 7465 as been formatte -0000090: 6420 746f 2066 6974 2079 6f75 7220 7363 d to fit your sc -00000a0: 7265 656e 2e0a 6d6f 6475 6c65 2070 686f reen..module pho -00000b0: 6e65 6e6f 3b20 2020 2020 2f2f 206f 7074 neno; // opt -00000c0: 696f 6e61 6c0a 696d 706f 7274 2073 7464 ional.import std -00000d0: 2e73 7464 696f 3b20 2020 2f2f 2077 7269 .stdio; // wri -00000e0: 7465 666c 6e20 2020 2020 0a69 6d70 6f72 tefln .impor -00000f0: 7420 7374 642e 6374 7970 653b 2020 202f t std.ctype; / -0000100: 2f20 6973 6469 6769 7420 2020 2020 0a69 / isdigit .i -0000110: 6d70 6f72 7420 7374 642e 7374 7265 616d mport std.stream -0000120: 3b20 202f 2f20 4275 6666 6572 6564 4669 ; // BufferedFi -0000130: 6c65 0a0a 2f2f 204a 7573 7420 666f 7220 le..// Just for -0000140: 7265 6164 6162 696c 6974 7920 2869 6d61 readability (ima -0000150: 6769 6e65 2063 6861 725b 5d5b 5d5b 6368 gine char[][][ch -0000160: 6172 5b5d 5d29 2020 2020 0a61 6c69 6173 ar[]]) .alias -0000170: 2063 6861 725b 5d20 7374 7269 6e67 3b0a char[] string;. -0000180: 616c 6961 7320 7374 7269 6e67 5b5d 2073 alias string[] s -0000190: 7472 696e 6761 7272 6179 3b0a 0a2f 2f2f tringarray;../// -00001a0: 2053 7472 6970 7320 6e6f 6e2d 6469 6769 Strips non-digi -00001b0: 7420 6368 6172 6163 7465 7273 2066 726f t characters fro -00001c0: 6d20 7468 6520 7374 7269 6e67 2028 434f m the string (CO -00001d0: 5729 0a73 7472 696e 6720 7374 7269 704e W).string stripN -00001e0: 6f6e 4469 6769 7428 2069 6e20 7374 7269 onDigit( in stri -00001f0: 6e67 206c 696e 6520 2920 0a7b 0a20 2020 ng line ) .{. -0000200: 2073 7472 696e 6720 7265 743b 0a20 2020 string ret;. -0000210: 2066 6f72 6561 6368 2875 696e 7420 692c foreach(uint i, -0000220: 2063 3b20 6c69 6e65 2920 7b0a 2020 2020 c; line) {. -0000230: 2020 2020 2f2f 2045 7272 6f72 3a20 7374 // Error: st -0000240: 642e 6374 7970 652e 6973 6469 6769 7420 d.ctype.isdigit -0000250: 6174 2043 3a5c 646d 645c 7372 635c 7068 at C:\dmd\src\ph -0000260: 6f62 6f73 5c73 7464 5c63 7479 7065 2e64 obos\std\ctype.d -0000270: 2833 3729 200a 2020 2020 2020 2020 2f2f (37) . // -0000280: 2063 6f6e 666c 6963 7473 2077 6974 6820 conflicts with -0000290: 7374 642e 7374 7265 616d 2e69 7364 6967 std.stream.isdig -00002a0: 6974 2061 7420 433a 5c64 6d64 5c73 7263 it at C:\dmd\src -00002b0: 5c70 686f 626f 735c 7374 645c 7374 7265 \phobos\std\stre -00002c0: 616d 2e64 2832 3932 3429 0a20 2020 2020 am.d(2924). -00002d0: 2020 2069 6620 2821 7374 642e 6374 7970 if (!std.ctyp -00002e0: 652e 6973 6469 6769 7428 6329 2920 7b0a e.isdigit(c)) {. 
-00002f0: 2020 2020 2020 2020 2020 2020 6966 2028 if ( -0000300: 2172 6574 290a 2020 2020 2020 2020 2020 !ret). -0000310: 2020 2020 2020 7265 7420 3d20 6c69 6e65 ret = line -0000320: 5b30 2e2e 695d 3b20 2020 200a 2020 2020 [0..i]; . -0000330: 2020 2020 7d20 2020 200a 2020 2020 2020 } . -0000340: 2020 656c 7365 2069 6620 2872 6574 290a else if (ret). -0000350: 2020 2020 2020 2020 2020 2020 7265 7420 ret -0000360: 7e3d 2063 3b20 2020 200a 2020 2020 7d20 ~= c; . } -0000370: 2020 200a 2020 2020 7265 7475 726e 2072 . return r -0000380: 6574 3f72 6574 3a6c 696e 653b 0a7d 0a0a et?ret:line;.}.. -0000390: 756e 6974 7465 7374 207b 0a20 2020 2061 unittest {. a -00003a0: 7373 6572 7428 2073 7472 6970 4e6f 6e44 ssert( stripNonD -00003b0: 6967 6974 2822 6173 6466 2229 203d 3d20 igit("asdf") == -00003c0: 2222 2020 293b 0a20 2020 2061 7373 6572 "" );. asser -00003d0: 7428 2073 7472 6970 4e6f 6e44 6967 6974 t( stripNonDigit -00003e0: 2822 5c27 3133 2d3d 3220 346b 6f70 2229 ("\'13-=2 4kop") -00003f0: 203d 3d20 2022 3133 3234 2220 2029 3b0a == "1324" );. -0000400: 7d0a 0a2f 2f2f 2043 6f6e 7665 7274 7320 }../// Converts -0000410: 6120 776f 7264 2069 6e74 6f20 6120 6e75 a word into a nu -0000420: 6d62 6572 2c20 6967 6e6f 7269 6e67 2061 mber, ignoring a -0000430: 6c6c 206e 6f6e 2061 6c70 6861 2063 6861 ll non alpha cha -0000440: 7261 6374 6572 7320 200a 7374 7269 6e67 racters .string -0000450: 2077 6f72 6454 6f4e 756d 2820 696e 2073 wordToNum( in s -0000460: 7472 696e 6720 776f 7264 2029 0a7b 0a2f tring word ).{./ -0000470: 2f20 7472 616e 736c 6174 696f 6e20 7461 / translation ta -0000480: 626c 6520 666f 7220 7468 6520 7461 736b ble for the task -0000490: 2061 7420 6861 6e64 0a63 6f6e 7374 2063 at hand.const c -00004a0: 6861 725b 3235 365d 2054 5241 4e53 4c41 har[256] TRANSLA -00004b0: 5445 203d 2020 2020 0a20 2020 2022 2020 TE = . " -00004c0: 2020 2020 2020 2020 2020 2020 2020 2020 -00004d0: 2020 2020 2020 2020 2020 2020 2020 2220 " -00004e0: 202f 2f20 3020 2020 0a20 2020 2022 2020 // 0 . " -00004f0: 2020 2020 2020 2020 2020 2020 2020 3031 01 -0000500: 3233 3435 3637 3839 2020 2020 2020 2220 23456789 " -0000510: 202f 2f20 3332 2020 2020 200a 2020 2020 // 32 . -0000520: 2220 3537 3633 3034 3939 3631 3738 3531 " 57630499617851 -0000530: 3838 3132 3334 3736 3232 3339 2020 2020 881234762239 -0000540: 2022 2020 2f2f 2036 3420 2020 0a20 2020 " // 64 . -0000550: 2022 2035 3736 3330 3439 3936 3137 3835 " 5763049961785 -0000560: 3138 3831 3233 3437 3632 3233 3920 2020 1881234762239 -0000570: 2020 220a 2020 2020 2220 2020 2020 2020 ". " -0000580: 2020 2020 2020 2020 2020 2020 2020 2020 -0000590: 2020 2020 2020 2020 2022 0a20 2020 2022 ". " -00005a0: 2020 2020 2020 2020 2020 2020 2020 2020 -00005b0: 2020 2020 2020 2020 2020 2020 2020 2020 -00005c0: 220a 2020 2020 2220 2020 2020 2020 2020 ". " -00005d0: 2020 2020 2020 2020 2020 2020 2020 2020 -00005e0: 2020 2020 2020 2022 2020 2020 0a20 2020 " . -00005f0: 2022 2020 2020 2020 2020 2020 2020 2020 " -0000600: 2020 2020 2020 2020 2020 2020 2020 2020 -0000610: 2020 223b 0a20 2020 2073 7472 696e 6720 ";. string -0000620: 7265 743b 0a20 2020 2066 6f72 6561 6368 ret;. foreach -0000630: 2863 3b20 6361 7374 2875 6279 7465 5b5d (c; cast(ubyte[] -0000640: 2977 6f72 6429 0a20 2020 2020 2020 2069 )word). i -0000650: 6620 2854 5241 4e53 4c41 5445 5b63 5d20 f (TRANSLATE[c] -0000660: 213d 2027 2027 290a 2020 2020 2020 2020 != ' '). -0000670: 2020 2020 7265 7420 7e3d 2054 5241 4e53 ret ~= TRANS -0000680: 4c41 5445 5b63 5d3b 0a20 2020 2072 6574 LATE[c];. 
ret -0000690: 7572 6e20 7265 743b 0a7d 0a0a 756e 6974 urn ret;.}..unit -00006a0: 7465 7374 207b 0a20 2f2f 2054 6573 7420 test {. // Test -00006b0: 776f 7264 546f 4e75 6d20 7573 696e 6720 wordToNum using -00006c0: 7468 6520 7461 626c 6520 6672 6f6d 2074 the table from t -00006d0: 6865 2074 6173 6b20 6465 7363 7269 7074 he task descript -00006e0: 696f 6e2e 0a20 6173 7365 7274 2820 2230 ion.. assert( "0 -00006f0: 3131 3132 3232 3333 3334 3435 3536 3636 1112223334455666 -0000700: 3737 3738 3838 3939 3922 203d 3d0a 2020 777888999" ==. -0000710: 2077 6f72 6454 6f4e 756d 2822 4520 7c20 wordToNum("E | -0000720: 4a20 4e20 5120 7c20 5220 5720 5820 7c20 J N Q | R W X | -0000730: 4420 5320 5920 7c20 4620 5420 7c20 4120 D S Y | F T | A -0000740: 4d20 7c20 4320 4920 5620 7c20 4220 4b20 M | C I V | B K -0000750: 5520 7c20 4c20 4f20 5020 7c20 4720 4820 U | L O P | G H -0000760: 5a22 2929 3b0a 2061 7373 6572 7428 2022 Z"));. assert( " -0000770: 3031 3131 3232 3233 3333 3434 3535 3636 0111222333445566 -0000780: 3637 3737 3838 3839 3939 2220 3d3d 200a 6777888999" == . -0000790: 2020 2077 6f72 6454 6f4e 756d 2822 6520 wordToNum("e -00007a0: 7c20 6a20 6e20 7120 7c20 7220 7720 7820 | j n q | r w x -00007b0: 7c20 6420 7320 7920 7c20 6620 7420 7c20 | d s y | f t | -00007c0: 6120 6d20 7c20 6320 6920 7620 7c20 6220 a m | c i v | b -00007d0: 6b20 7520 7c20 6c20 6f20 7020 7c20 6720 k u | l o p | g -00007e0: 6820 7a22 2929 3b0a 2061 7373 6572 7428 h z"));. assert( -00007f0: 2022 3031 3233 3435 3637 3839 2220 3d3d "0123456789" == -0000800: 200a 2020 2077 6f72 6454 6f4e 756d 2822 . wordToNum(" -0000810: 3020 7c20 2020 3120 2020 7c20 2020 3220 0 | 1 | 2 -0000820: 2020 7c20 2020 3320 2020 7c20 2034 2020 | 3 | 4 -0000830: 7c20 2035 2020 7c20 2020 3620 2020 7c20 | 5 | 6 | -0000840: 2020 3720 2020 7c20 2020 3820 2020 7c20 7 | 8 | -0000850: 2020 3922 2929 3b0a 7d0a 0a76 6f69 6420 9"));.}..void -0000860: 6d61 696e 2820 7374 7269 6e67 5b5d 2061 main( string[] a -0000870: 7267 7320 290a 7b0a 2020 2020 2f2f 2054 rgs ).{. // T -0000880: 6869 7320 6173 736f 6369 6174 6976 6520 his associative -0000890: 6172 7261 7920 6d61 7073 2061 206e 756d array maps a num -00008a0: 6265 7220 746f 2061 6e20 6172 7261 7920 ber to an array -00008b0: 6f66 2077 6f72 6473 2e20 2020 200a 2020 of words. . -00008c0: 2020 7374 7269 6e67 6172 7261 795b 7374 stringarray[st -00008d0: 7269 6e67 5d20 2020 206e 756d 3277 6f72 ring] num2wor -00008e0: 6473 3b0a 0a20 2020 2066 6f72 6561 6368 ds;.. foreach -00008f0: 2873 7472 696e 6720 776f 7264 3b20 6e65 (string word; ne -0000900: 7720 4275 6666 6572 6564 4669 6c65 2822 w BufferedFile(" -0000910: 6469 6374 696f 6e61 7279 2e74 7874 2220 dictionary.txt" -0000920: 2920 290a 2020 2020 2020 2020 6e75 6d32 ) ). num2 -0000930: 776f 7264 735b 2077 6f72 6454 6f4e 756d words[ wordToNum -0000940: 2877 6f72 6429 205d 207e 3d20 776f 7264 (word) ] ~= word -0000950: 2e64 7570 3b20 2020 2020 2020 202f 2f20 .dup; // -0000960: 6d75 7374 2064 7570 0a0a 2020 2020 2f2f must dup.. // -0000970: 2f20 4669 6e64 7320 616c 6c20 616c 7465 / Finds all alte -0000980: 726e 6174 6976 6573 2066 6f72 2074 6865 rnatives for the -0000990: 2067 6976 656e 206e 756d 6265 720a 2020 given number. -00009a0: 2020 2f2f 2f20 2873 686f 756c 6420 6861 /// (should ha -00009b0: 7665 2062 6565 6e20 7374 7269 7070 6564 ve been stripped -00009c0: 2066 726f 6d20 6e6f 6e2d 6469 6769 7420 from non-digit -00009d0: 6368 6172 6163 7465 7273 290a 2020 2020 characters). 
-00009e0: 7374 7269 6e67 6172 7261 7920 5f46 696e stringarray _Fin -00009f0: 6457 6f72 6473 2820 7374 7269 6e67 206e dWords( string n -0000a00: 756d 6265 7273 2c20 626f 6f6c 2064 6967 umbers, bool dig -0000a10: 6974 6f6b 2029 0a20 2020 2069 6e20 7b0a itok ). in {. -0000a20: 2020 2020 2020 2020 6173 7365 7274 286e assert(n -0000a30: 756d 6265 7273 2e6c 656e 6774 6820 3e20 umbers.length > -0000a40: 2030 293b 2020 2020 0a20 2020 207d 2020 0); . } -0000a50: 2020 0a20 2020 206f 7574 2872 6573 756c . out(resul -0000a60: 7429 207b 0a20 2020 2020 2020 2066 6f72 t) {. for -0000a70: 6561 6368 2028 613b 2072 6573 756c 7429 each (a; result) -0000a80: 0a20 2020 2020 2020 2020 2020 2061 7373 . ass -0000a90: 6572 7428 2077 6f72 6454 6f4e 756d 2861 ert( wordToNum(a -0000aa0: 2920 3d3d 206e 756d 6265 7273 2029 3b0a ) == numbers );. -0000ab0: 2020 2020 7d20 2020 200a 2020 2020 626f } . bo -0000ac0: 6479 207b 0a20 2020 2020 2020 2073 7472 dy {. str -0000ad0: 696e 6761 7272 6179 2072 6574 3b0a 2020 ingarray ret;. -0000ae0: 2020 2020 2020 626f 6f6c 2066 6f75 6e64 bool found -0000af0: 776f 7264 203d 2066 616c 7365 3b0a 2020 word = false;. -0000b00: 2020 2020 2020 666f 7220 2875 696e 7420 for (uint -0000b10: 743d 313b 2074 3c3d 6e75 6d62 6572 732e t=1; t<=numbers. -0000b20: 6c65 6e67 7468 3b20 2b2b 7429 207b 0a20 length; ++t) {. -0000b30: 2020 2020 2020 2020 2020 2061 7574 6f20 auto -0000b40: 616c 7465 726e 6174 6976 6573 203d 206e alternatives = n -0000b50: 756d 6265 7273 5b30 2e2e 745d 2069 6e20 umbers[0..t] in -0000b60: 6e75 6d32 776f 7264 733b 0a20 2020 2020 num2words;. -0000b70: 2020 2020 2020 2069 6620 2821 616c 7465 if (!alte -0000b80: 726e 6174 6976 6573 290a 2020 2020 2020 rnatives). -0000b90: 2020 2020 2020 2020 2020 636f 6e74 696e contin -0000ba0: 7565 3b0a 2020 2020 2020 2020 2020 2020 ue;. -0000bb0: 666f 756e 6477 6f72 6420 3d20 7472 7565 foundword = true -0000bc0: 3b0a 2020 2020 2020 2020 2020 2020 6966 ;. if -0000bd0: 2028 6e75 6d62 6572 732e 6c65 6e67 7468 (numbers.length -0000be0: 203e 2020 7429 207b 0a20 2020 2020 2020 > t) {. -0000bf0: 2020 2020 2020 2020 202f 2f20 436f 6d62 // Comb -0000c00: 696e 6520 616c 6c20 6375 7272 656e 7420 ine all current -0000c10: 616c 7465 726e 6174 6976 6573 2077 6974 alternatives wit -0000c20: 6820 616c 6c20 616c 7465 726e 6174 6976 h all alternativ -0000c30: 6573 2020 2020 200a 2020 2020 2020 2020 es . -0000c40: 2020 2020 2020 2020 2f2f 206f 6620 7468 // of th -0000c50: 6520 7265 7374 2028 6e65 7874 2070 6965 e rest (next pie -0000c60: 6365 2063 616e 2073 7461 7274 2077 6974 ce can start wit -0000c70: 6820 6120 6469 6769 7429 2020 2020 2020 h a digit) -0000c80: 2020 2020 2020 2020 0a20 2020 2020 2020 . -0000c90: 2020 2020 2020 2020 2066 6f72 6561 6368 foreach -0000ca0: 2028 6132 3b20 5f46 696e 6457 6f72 6473 (a2; _FindWords -0000cb0: 2820 6e75 6d62 6572 735b 742e 2e24 5d2c ( numbers[t..$], -0000cc0: 2074 7275 6520 2020 2020 2920 290a 2020 true ) ). -0000cd0: 2020 2020 2020 2020 2020 2020 2020 2020 -0000ce0: 2020 666f 7265 6163 6828 6131 3b20 2a61 foreach(a1; *a -0000cf0: 6c74 6572 6e61 7469 7665 7329 0a20 2020 lternatives). -0000d00: 2020 2020 2020 2020 2020 2020 2020 2020 -0000d10: 2020 2020 7265 7420 7e3d 2061 3120 7e20 ret ~= a1 ~ -0000d20: 2220 2220 7e20 6132 3b0a 2020 2020 2020 " " ~ a2;. -0000d30: 2020 2020 2020 7d0a 2020 2020 2020 2020 }. -0000d40: 2020 2020 656c 7365 2020 2020 0a20 2020 else . 
-0000d50: 2020 2020 2020 2020 2020 2020 2072 6574 ret -0000d60: 207e 3d20 2a61 6c74 6572 6e61 7469 7665 ~= *alternative -0000d70: 733b 2020 2020 2f2f 2061 7070 656e 6420 s; // append -0000d80: 7468 6573 6520 616c 7465 726e 6174 6976 these alternativ -0000d90: 6573 0a20 2020 2020 2020 207d 0a20 2020 es. }. -0000da0: 2020 2020 202f 2f20 5472 7920 746f 206b // Try to k -0000db0: 6565 7020 3120 6469 6769 742c 206f 6e6c eep 1 digit, onl -0000dc0: 7920 6966 2077 6527 7265 2061 6c6c 6f77 y if we're allow -0000dd0: 6564 2061 6e64 206e 6f20 6f74 6865 720a ed and no other. -0000de0: 2020 2020 2020 2020 2f2f 2061 6c74 6572 // alter -0000df0: 6e61 7469 7665 7320 7765 7265 2066 6f75 natives were fou -0000e00: 6e64 0a20 2020 2020 2020 202f 2f20 5465 nd. // Te -0000e10: 7374 696e 6720 2272 6574 2e6c 656e 6774 sting "ret.lengt -0000e20: 6822 206d 616b 6573 206d 6f72 6520 7365 h" makes more se -0000e30: 6e73 6520 7468 616e 2074 6573 7469 6e67 nse than testing -0000e40: 2022 666f 756e 6477 6f72 6422 2c0a 2020 "foundword",. -0000e50: 2020 2020 2020 2f2f 2062 7574 2074 6865 // but the -0000e60: 206f 7468 6572 2069 6d70 6c65 6d65 6e74 other implement -0000e70: 6174 696f 6e73 2073 6565 6d20 746f 2064 ations seem to d -0000e80: 6f20 6a75 7374 2074 6869 732e 0a20 2020 o just this.. -0000e90: 2020 2020 2069 6620 2864 6967 6974 6f6b if (digitok -0000ea0: 2026 2620 2166 6f75 6e64 776f 7264 2920 && !foundword) -0000eb0: 7b20 2f2f 7265 742e 6c65 6e67 7468 203d { //ret.length = -0000ec0: 3d20 3020 200a 2020 2020 2020 2020 2020 = 0 . -0000ed0: 2020 6966 286e 756d 6265 7273 2e6c 656e if(numbers.len -0000ee0: 6774 6820 3e20 2031 2920 7b0a 2020 2020 gth > 1) {. -0000ef0: 2020 2020 2020 2020 2020 2020 2f2f 2043 // C -0000f00: 6f6d 6269 6e65 2031 2064 6967 6974 2077 ombine 1 digit w -0000f10: 6974 6820 616c 6c20 616c 7465 6e61 7469 ith all altenati -0000f20: 7665 7320 6672 6f6d 2074 6865 2072 6573 ves from the res -0000f30: 7420 2020 200a 2020 2020 2020 2020 2020 t . -0000f40: 2020 2020 2020 2f2f 2028 6e65 7874 2070 // (next p -0000f50: 6965 6365 2063 616e 206e 6f74 2073 7461 iece can not sta -0000f60: 7274 2077 6974 6820 6120 6469 6769 7429 rt with a digit) -0000f70: 2020 2020 2020 2020 2020 0a20 2020 2020 . -0000f80: 2020 2020 2020 2020 2020 2066 6f72 6561 forea -0000f90: 6368 2028 613b 205f 4669 6e64 576f 7264 ch (a; _FindWord -0000fa0: 7328 206e 756d 6265 7273 5b31 2e2e 245d s( numbers[1..$] -0000fb0: 2c20 6661 6c73 6520 2920 290a 2020 2020 , false ) ). -0000fc0: 2020 2020 2020 2020 2020 2020 2020 2020 -0000fd0: 7265 7420 7e3d 206e 756d 6265 7273 5b30 ret ~= numbers[0 -0000fe0: 2e2e 315d 207e 2022 2022 207e 2061 3b0a ..1] ~ " " ~ a;. -0000ff0: 2020 2020 2020 2020 2020 2020 7d20 2020 } -0001000: 200a 2020 2020 2020 2020 2020 2020 656c . el -0001010: 7365 2020 2020 0a20 2020 2020 2020 2020 se . -0001020: 2020 2020 2020 2072 6574 207e 3d20 6e75 ret ~= nu -0001030: 6d62 6572 735b 302e 2e31 5d3b 2020 2020 mbers[0..1]; -0001040: 2f2f 206a 7573 7420 6170 7065 6e64 2074 // just append t -0001050: 6869 7320 6469 6769 7420 2020 2020 2020 his digit -0001060: 2020 2020 2020 0a20 2020 2020 2020 207d . } -0001070: 2020 2020 0a20 2020 2020 2020 2072 6574 . ret -0001080: 7572 6e20 7265 743b 0a20 2020 207d 0a0a urn ret;. }.. -0001090: 2020 2020 2f2f 2f20 2854 6869 7320 6675 /// (This fu -00010a0: 6e63 7469 6f6e 2077 6173 2069 6e6c 696e nction was inlin -00010b0: 6564 2069 6e20 7468 6520 6f72 6967 696e ed in the origin -00010c0: 616c 2070 726f 6772 616d 2920 0a20 2020 al program) . 
-00010d0: 202f 2f2f 2046 696e 6473 2061 6c6c 2061 /// Finds all a -00010e0: 6c74 6572 6e61 7469 7665 7320 666f 7220 lternatives for -00010f0: 7468 6520 6769 7665 6e20 7068 6f6e 6520 the given phone -0001100: 6e75 6d62 6572 200a 2020 2020 2f2f 2f20 number . /// -0001110: 5265 7475 726e 733a 2061 7272 6179 206f Returns: array o -0001120: 6620 7374 7269 6e67 7320 0a20 2020 2073 f strings . s -0001130: 7472 696e 6761 7272 6179 2046 696e 6457 tringarray FindW -0001140: 6f72 6473 2820 7374 7269 6e67 2070 686f ords( string pho -0001150: 6e65 5f6e 756d 6265 7220 290a 2020 2020 ne_number ). -0001160: 7b0a 2020 2020 2020 2020 6966 2028 2170 {. if (!p -0001170: 686f 6e65 5f6e 756d 6265 722e 6c65 6e67 hone_number.leng -0001180: 7468 290a 2020 2020 2020 2020 2020 2020 th). -0001190: 7265 7475 726e 206e 756c 6c3b 0a20 2020 return null;. -00011a0: 2020 2020 202f 2f20 5374 7269 7020 7468 // Strip th -00011b0: 6520 6e6f 6e2d 6469 6769 7420 6368 6172 e non-digit char -00011c0: 6163 7465 7273 2066 726f 6d20 7468 6520 acters from the -00011d0: 7068 6f6e 6520 6e75 6d62 6572 2c20 616e phone number, an -00011e0: 640a 2020 2020 2020 2020 2f2f 2070 6173 d. // pas -00011f0: 7320 6974 2074 6f20 7468 6520 7265 6375 s it to the recu -0001200: 7273 6976 6520 6675 6e63 7469 6f6e 2028 rsive function ( -0001210: 6c65 6164 696e 6720 6469 6769 7420 6973 leading digit is -0001220: 2061 6c6c 6f77 6564 290a 2020 2020 2020 allowed). -0001230: 2020 7265 7475 726e 205f 4669 6e64 576f return _FindWo -0001240: 7264 7328 2073 7472 6970 4e6f 6e44 6967 rds( stripNonDig -0001250: 6974 2870 686f 6e65 5f6e 756d 6265 7229 it(phone_number) -0001260: 2c20 7472 7565 2029 3b20 2020 200a 2020 , true ); . -0001270: 2020 7d20 2020 200a 2020 2020 0a20 2020 } . . -0001280: 202f 2f20 5265 6164 2074 6865 2070 686f // Read the pho -0001290: 6e65 206e 756d 6265 7273 2020 2020 200a ne numbers . -00012a0: 2020 2020 666f 7265 6163 6828 7374 7269 foreach(stri -00012b0: 6e67 2070 686f 6e65 3b20 6e65 7720 4275 ng phone; new Bu -00012c0: 6666 6572 6564 4669 6c65 2822 696e 7075 fferedFile("inpu -00012d0: 742e 7478 7422 2020 2029 2029 0a20 2020 t.txt" ) ). -00012e0: 2020 2020 2066 6f72 6561 6368 2861 6c74 foreach(alt -00012f0: 6572 6e61 7469 7665 3b20 4669 6e64 576f ernative; FindWo -0001300: 7264 7328 2070 686f 6e65 2029 2029 0a20 rds( phone ) ). -0001310: 2020 2020 2020 2020 2020 2077 7269 7465 write -0001320: 666c 6e28 7068 6f6e 652c 2022 3a20 222c fln(phone, ": ", -0001330: 2061 6c74 6572 6e61 7469 7665 2029 3b0a alternative );. -0001340: 7d0a 0a }.. diff --git a/tests/examplefiles/html+php_faulty.php b/tests/examplefiles/html+php_faulty.php deleted file mode 100644 index b3d9bbc7..00000000 --- a/tests/examplefiles/html+php_faulty.php +++ /dev/null @@ -1 +0,0 @@ - - * - * Hybris is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Hybris is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with Hybris. If not, see . 
-*/ -import std.io.file; - -class File { - - protected file, fileName, mode; - - public method File( fileName, mode ){ - me.fileName = fileName; - me.mode = mode; - me.file = fopen ( me.fileName, me.mode); - } - - private method isBinary(){ - return me.mode.find("b") != false; - } - - public method File ( file ){ - me.file = file; - } - - private method __expire() { - me.close(); - } - - public method close(){ - fclose( me.file ); - } - - public method readLine(){ - return line = fgets( me.file ); - } - - public method getFileName(){ - return me.fileName; - } - - public method getSize(){ - return fsize( me.fileName ); - } - - public method getPosition(){ - return ftell( me.file ); - } - - public method readAll(){ - text = ""; - line = ""; - while ( ( line = fgets(me.file) ) != 0 ){ - text += line; - } - return text; - } - - public method read(){ - byte = ' '; - if ( fread( me.file, byte) > 0 ) { - return byte; - } - else { - return -1; - } - } - - public method read( bytes ) { - word = ""; - byte = ' '; - if ( fread( me.file, byte, bytes) > 0 ) { - word += byte; - } - else { - return -1; - } - return word; - } - - public method read ( seek, seekType ){ - if ( me.seek( seek, seekType) == 0 ) { - return -1; - } - - return me.read(); - } - - public method read ( bytes, seek, seekType ){ - if ( me.seek( seek, seekType) == 0 ) { - return -1; - } - - return me.read( bytes ); - } - - public method readType ( type ){ - if ( me.isBinary() == false ) { - return -1; - } - if ( fread (me.file, type ) > 0 ) { - return type; - } - else { - return -1; - } - } - - operator >> ( object ){ - return me.readType(object); - } - - public method readType ( type, bytes ){ - if ( me.isBinary() == false ) { - return -1; - } - if ( fread (me.file, type, bytes ) > 0){ - return type; - } - else { - return -1; - } - } - - public method readType ( type, seek, seekType ){ - if ( ( me.isBinary() == false ) | ( me.seek( seek, seekType) == 0 ) ) { - return -1; - } - - return me.readType( type ); - } - - public method readType( type, bytes, seek, seekType){ - if ( ( me.isBinary() == false ) | ( me.seek( seek, seekType) == 0 ) ) { - return -1; - } - - return me.readType( type, bytes ); - } - - public method write( data ){ - return fwrite( me.file, data ); - } - - operator << ( object ){ - return me.write(object); - } - - public method write ( data, bytes ){ - return fwrite( me.file, data, bytes); - } - - public method seek( pos, mode ){ - return fseek( me.file, pos, mode ); - } - - public method merge ( fileName ){ - text = file ( fileName ); - return me.write ( me.file, text ); - } -} diff --git a/tests/examplefiles/idl_sample.pro b/tests/examplefiles/idl_sample.pro deleted file mode 100644 index 814d510d..00000000 --- a/tests/examplefiles/idl_sample.pro +++ /dev/null @@ -1,73 +0,0 @@ -; docformat = 'rst' - -; Example IDL (Interactive Data Language) source code. - -;+ -; Get `nIndices` random indices for an array of size `nValues` (without -; repeating an index). 
-; -; :Examples: -; Try:: -; -; IDL> r = randomu(seed, 10) -; IDL> print, r, format='(4F)' -; 0.6297589 0.7815896 0.2508559 0.7546844 -; 0.1353382 0.1245834 0.8733745 0.0753110 -; 0.8054136 0.9513228 -; IDL> ind = mg_sample(10, 3, seed=seed) -; IDL> print, ind -; 2 4 7 -; IDL> print, r[ind] -; 0.250856 0.135338 0.0753110 -; -; :Returns: -; lonarr(`nIndices`) -; -; :Params: -; nValues : in, required, type=long -; size of array to choose indices from -; nIndices : in, required, type=long -; number of indices needed -; -; :Keywords: -; seed : in, out, optional, type=integer or lonarr(36) -; seed to use for random number generation, leave undefined to use a -; seed generated from the system clock; new seed will be output -;- -function mg_sample, nValues, nIndices, seed=seed - compile_opt strictarr - - ; get random nIndices by finding the indices of the smallest nIndices in a - ; array of random values - values = randomu(seed, nValues) - - ; our random values are uniformly distributed, so ideally the nIndices - ; smallest values are in the first bin of the below histogram - nBins = nValues / nIndices - h = histogram(values, nbins=nBins, reverse_indices=ri) - - ; the candidates for being in the first nIndices will live in bins 0..bin - nCandidates = 0L - for bin = 0L, nBins - 1L do begin - nCandidates += h[bin] - if (nCandidates ge nIndices) then break - endfor - - ; get the candidates and sort them - candidates = ri[ri[0] : ri[bin + 1L] - 1L] - sortedCandidates = sort(values[candidates]) - - ; return the first nIndices of them - return, (candidates[sortedCandidates])[0:nIndices-1L] -end - - -; main-level example program - -r = randomu(seed, 10) -print, r -ind = mg_sample(10, 3, seed=seed) -print, ind -print, r[ind] - -end \ No newline at end of file diff --git a/tests/examplefiles/iex_example b/tests/examplefiles/iex_example deleted file mode 100644 index 22407e4e..00000000 --- a/tests/examplefiles/iex_example +++ /dev/null @@ -1,23 +0,0 @@ -iex> :" multi -...> line ' \s \123 \x20 -...> atom" -:" multi\n line ' S \natom" - -iex(1)> <<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "hello™1" -"hello™1" - -iex(2)> c -8482 - -iex> 1 + :atom -** (ArithmeticError) bad argument in arithmetic expression - :erlang.+(1, :atom) - -iex(3)> 1 + -...(3)> 2 + -...(3)> 3 -6 - -iex> IO.puts "Hello world" -Hello world -:ok diff --git a/tests/examplefiles/inet_pton6.dg b/tests/examplefiles/inet_pton6.dg deleted file mode 100644 index 3813d5b8..00000000 --- a/tests/examplefiles/inet_pton6.dg +++ /dev/null @@ -1,71 +0,0 @@ -import '/re' -import '/sys' - - -# IPv6address = hexpart [ ":" IPv4address ] -# IPv4address = 1*3DIGIT "." 1*3DIGIT "." 1*3DIGIT "." 1*3DIGIT -# hexpart = [ hexseq ] [ "::" [ hexseq ] ] -# hexseq = hex4 *( ":" hex4) -# hex4 = 1*4HEXDIG -hexpart = r'({0}|)(?:::({0}|)|)'.format r'(?:[\da-f]{1,4})(?::[\da-f]{1,4})*' -addrv4 = r'(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})' -addrv6 = re.compile $ r'(?i)(?:{})(?::{})?$'.format hexpart addrv4 - - -# Parse a base-N number given a list of its digits. -# -# :param q: the number of digits in that numeral system -# -# :param digits: an iterable of integers in range [0..q] -# -# :return: a decimal integer -# -base_n = q digits -> foldl (x y -> x * q + y) 0 digits - - -# Parse a sequence of hexadecimal numbers -# -# :param q: a string of colon-separated base-16 integers -# -# :return: an iterable of Python ints -# -unhex = q -> q and map (p -> int p 16) (q.split ':') - - -# Parse an IPv6 address as specified in RFC 4291. 
-# -# :param address: a string, obviously. -# -# :return: an integer which, written in binary form, points to the same node. -# -inet_pton6 = address -> - not (match = addrv6.match address) => raise $ ValueError 'not a valid IPv6 address' - start, end, *ipv4 = match.groups! - - is_ipv4 = not $ None in ipv4 - shift = (7 - start.count ':' - 2 * is_ipv4) * 16 - - (end is None and shift) or shift < 0 => raise $ ValueError 'not a valid IPv6 address' - hexaddr = (base_n 0x10000 (unhex start) << shift) + base_n 0x10000 (unhex $ end or '') - if (is_ipv4 => (hexaddr << 32) + base_n 0x100 (map int ipv4)) (otherwise => hexaddr) - - -inet6_type = q -> if - q == 0 => 'unspecified' - q == 1 => 'loopback' - (q >> 32) == 0x000000000000ffff => 'IPv4-mapped' - (q >> 64) == 0xfe80000000000000 => 'link-local' - (q >> 120) != 0x00000000000000ff => 'general unicast' - (q >> 112) % (1 << 4) == 0x0000000000000000 => 'multicast w/ reserved scope value' - (q >> 112) % (1 << 4) == 0x000000000000000f => 'multicast w/ reserved scope value' - (q >> 112) % (1 << 4) == 0x0000000000000001 => 'interface-local multicast' - (q >> 112) % (1 << 4) == 0x0000000000000004 => 'admin-local multicast' - (q >> 112) % (1 << 4) == 0x0000000000000005 => 'site-local multicast' - (q >> 112) % (1 << 4) == 0x0000000000000008 => 'organization-local multicast' - (q >> 112) % (1 << 4) == 0x000000000000000e => 'global multicast' - (q >> 112) % (1 << 4) != 0x0000000000000002 => 'multicast w/ unknown scope value' - (q >> 24) % (1 << 112) == 0x00000000000001ff => 'solicited-node multicast' - otherwise => 'link-local multicast' - - -print $ (x -> inet6_type x, hex x) $ inet_pton6 $ sys.stdin.read!.strip! diff --git a/tests/examplefiles/inform6_example b/tests/examplefiles/inform6_example deleted file mode 100644 index 6fa1fe5b..00000000 --- a/tests/examplefiles/inform6_example +++ /dev/null @@ -1,375 +0,0 @@ -!% $SMALL ! This is ICL, not a comment. -!% -w - -!% A comprehensive test of Inform6Lexer. - -Switches d2SDq; - -Constant Story "Informal Testing"; -Constant Headline "^Not a game.^";!% This is a comment, not ICL. - -Release 3; -Serial "151213"; -Version 5; - -Ifndef TARGET_ZCODE; -Ifndef TARGET_GLULX; -Ifndef WORDSIZE; -Default WORDSIZE 2; -Constant TARGET_ZCODE; -Endif; -Endif; -Endif; - -Ifv3; Message "Compiling to version 3"; Endif; -Ifv5; Message "Not compiling to version 3"; endif; -ifdef TARGET_ZCODE; -#IFTRUE (#version_number == 5); -Message "Compiling to version 5"; -#ENDIF; -endif ; - -Replace CreatureTest; - -Include "Parser"; -Include "VerbLib"; - -# ! A hash is optional at the top level. 
-Object kitchen "Kitchen" - with description "You are in a kitchen.", - arr 1 2 3 4, - has light; - -#[ Initialise; - location = kitchen; - print "v"; inversion; "^"; -]; - -Ifdef VN_1633; -Replace IsSeeThrough IsSeeThroughOrig; -[ IsSeeThrough * o; - return o hasnt opaque || IsSeeThroughOrig(o); -]; -Endif; - -Abbreviate "test"; - -Array table buffer 260; - -Attribute reversed; -Attribute opaque alias locked; -Constant to reversed; - -Property long additive additive long alias; -Property long long long wingspan alias alias; - -Class Flier with wingspan 5; -Class Bird(10) has animate class Flier with wingspan 2; - -Constant Constant1; -Constant Constant2 Constant1; -Constant Constant3 = Constant2; -Ifdef VN_1633; Undef Constant; Endif; - -Ifdef VN_1633; -Dictionary 'word' 1 2; -Ifnot; -Dictionary dict_word "word"; -Endif; - -Fake_action NotReal; - -Global global1; -Global global2 = 69105; - -Lowstring low_string "low string"; - -Iftrue false; -Message error "Uh-oh!^~false~ shouldn't be ~true~."; -Endif; -Iffalse true; -Message fatalerror "Uh-oh!^~true~ shouldn't be ~false~."; -Endif; - -Nearby person "person" - with name 'person', - description "This person is barely implemented.", - life [ * x y z; - Ask: print_ret (The) self, " says nothing."; - Answer: print (The) self, " didn't say anything.^"; rfalse; - ] - has has animate transparent; - -Object -> -> test_tube "test tube" - with name 'test' "tube" 'testtube', - has ~openable ~opaque container; - -Bird -> pigeon - with name 'pigeon', - description [; - "The pigeon has a wingspan of ", self.&wingspan-->0, " wing units."; - ]; - -Object -> "thimble" with name 'thimble'; - -Object -> pebble "pebble" with name 'pebble'; - -Ifdef TARGET_ZCODE; Trace objects; Endif; - -Statusline score; - -Stub StubR 3; - -Ifdef TARGET_ZCODE; -Zcharacter "abcdefghijklmnopqrstuvwxyz" - "ABCDEFGHIJKLMNOPQRSTUVWXYZ" - "123456789.,!?_#'0/@{005C}-:()"; -Zcharacter table '@!!' '@<<' '@'A'; -Zcharacter table + '@AE' '@{dc}' '@et' '@:y'; -Ifnot; -Ifdef TARGET_GLULX; -Message "Glulx doesn't use ~Zcharacter~.^Oh well."; ! '~' and '^' work here. -Ifnot; -Message warning "Uh-oh! ^~^"; ! They don't work in other Messages. -Endif; -Endif; - -Include "Grammar"; - -Verb"acquire"'collect'='take'; - -[ NounFilter; return noun ofclass Bird; ]; - -[ ScopeFilter obj; - switch (scope_stage) { - 1: rtrue; - 2: objectloop (obj in compass) PlaceInScope(obj); - 3: "Nothing is in scope."; - } -]; - -Verb meta "t" 'test' - * 'held' held -> TestHeld - * number -> TestNumber - * reversed -> TestAttribute - * 'creature' creature -> TestCreature - * 'multiheld' multiheld -> TestMultiheld - * 'm' multiexcept 'into'/"in" noun -> TestMultiexcept - * 'm' multiinside 'from' noun -> TestMultiinside - * multi -> TestMulti - * 'filter'/'f' noun=NounFilter -> TestNounFilter - * 'filter'/'f' scope=ScopeFilter -> TestScopeFilter - * 'special' special -> TestSpecial - * topic -> TestTopic; - -Verb 'reverse' 'swap' 'exchange' - * held 'for' noun -> reverse - * noun 'with' noun -> reverse reverse; - -Extend "t" last * noun -> TestNoun; - -Extend 't' first * -> Test; - -Extend 'wave' replace * -> NewWave; - -Extend only 'feel' 'touch' replace * noun -> Feel; - -[ TestSub "a\ - " b o "@@98"; ! Not an escape sequence. - string 25 low_string; - print "Test what?> "; - table->0 = 260; - parse->0 = 61; - #Ifdef TARGET_ZCODE; - read buffer parse; - #Ifnot; ! TARGET_GLULX - KeyboardPrimitive(buffer, parse); - #Endif; ! 
TARGET_ - switch (parse-->1) { - 'save': - #Ifdef TARGET_ZCODE; - #Ifv3; - @save ?saved; - #Ifnot; - save saved; - #Endif; - #Endif; - print "Saving failed.^"; - 'restore': - #Ifdef TARGET_ZCODE; - restore saved; - #Endif; - print "Restoring failed.^"; - 'restart': - @restart; - 'quit', 'q//': - quit; - return 2; rtrue; rfalse; return; - 'print', 'p//': - print "Print:^", - " (string): ", (string) "xyzzy^", - " (number): ", (number) 123, "^", - " (char): ", (char) 'x', "^", - " (address): ", (address) 'plugh//p', "^", - " (The): ", (The) person, "^", - " (the): ", (the) person, "^", - " (A): ", (A) person, "^", - " (a): ", (a) person, "^", - " (an): ", (an) person, "^", - " (name): ", (name) person, "^", - " (object): ", (object) person, "^", - " (property): ", (property) alias, "^", - " (): ", (LanguageNumber) 123, "^", - " : ", a * 2 - 1, "^", - " (): ", (a + person), "^"; - print "Escapes:^", - " by mnemonic: @!! @<< @'A @AE @et @:y^", - " by decimal value: @@64 @@126^", - " by Unicode value: @{DC}@{002b}^", - " by string variable: @25^"; - 'font', 'style': - font off; print "font off^"; - font on; print "font on^"; - style reverse; print "style reverse^"; style roman; - style bold; print "style bold^"; - style underline; print "style underline^"; - style fixed; print "style fixed^"; - style roman; print "style roman^"; - 'statements': - spaces 8; - objectloop (o) { - print "objectloop (o): ", (the) o, "^"; - } - objectloop (o in compass) { ! 'in' is a keyword - print "objectloop (o in compass): ", (the) o, "^"; - } - objectloop (o in compass && true) { ! 'in' is an operator - print "objectloop (o in compass && true): ", (the) o, "^"; - } - objectloop (o from se_obj) { - print "objectloop (o from se_obj): ", (the) o, "^"; - } - objectloop (o near person) { - print "objectloop (o near person): ", (the) o, "^"; - } - #Ifdef TARGET_ZCODE; - #Trace assembly on; -@ ! This is assembly. - add -4 ($$1+$3)*2 -> b; - @get_sibling test_tube -> b ?saved; - @inc [b]; - @je sp (1+3*0) ? equal; - @je 1 ((sp)) ?~ different; - .! This is a label: - equal; - print "sp == 1^"; - jump label; - .different; - print "sp @@126= 1^"; - .label; - #Trace off; #Endif; ! TARGET_ZCODE - a = random(10); - switch (a) { - 1, 9: - box "Testing oneself is best when done alone." - " -- Jimmy Carter"; - 2, 6, to, 3 to 5, to to to: - ; - #Ifdef VN_1633; - ; - #Endif; - a = ##Drop; - < ! The angle brackets may be separated by whitespace. - < (a) pigeon > >; - default: - do { - give person general ~general; - } until (person provides life && ~~false); - if (a == 7) a = 4; - else a = 5; - } - 'expressions': - a = 1+1-1*1/1%1&1|1&&1||1==(1~=(1>(1<(1>=(1<=1))))); - a++; ++a; a--; --a; - a = person.life; - a = kitchen.&arr; - a = kitchen.#arr; - a = Bird::wingspan; - a = kitchen has general; - a = kitchen hasnt general; - a = kitchen provides arr; - a = person in kitchen; - a = person notin kitchen; - a = person ofclass Bird; - a = a == 0 or 1; - a = StubR(); - a = StubR(a); - a = StubR(, a); - a = "string"; - a = 'word'; - a = '''; ! 
character - a = $09afAF; - a = $$01; - a = ##Eat; a = #a$Eat; - a = #g$self; - a = #n$!word; - a = #r$StubR; - a = #dict_par1; - default: - for (a = 2, b = a; (a < buffer->1 + 2) && (Bird::wingspan): ++a, b--) { - print (char) buffer->a; - } - new_line; - for (::) break; - } - .saved;; -]; - -[ TestNumberSub; - print_ret parsed_number, " is ", (number) parsed_number, "."; -]; - -[ TestAttributeSub; print_ret (The) noun, " has been reversed."; ]; - -[ CreatureTest obj; return obj has animate; ]; - -[ TestCreatureSub; print_ret (The) noun, " is a creature."; ]; - -[ TestMultiheldSub; print_ret "You are holding ", (the) noun, "."; ]; - -[ TestMultiexceptSub; "You test ", (the) noun, " with ", (the) second, "."; ]; - -[ TestMultiinsideSub; "You test ", (the) noun, " from ", (the) second, "."; ]; - -[ TestMultiSub; print_ret (The) noun, " is a thing."; ]; - -[ TestNounFilterSub; print_ret (The) noun, " is a bird."; ]; - -[ TestScopeFilterSub; print_ret (The) noun, " is a direction."; ]; - -[ TestSpecialSub; "Your lucky number is ", parsed_number, "."; ]; - -[ TestTopicSub; "You discuss a topic."; ]; - -[ TestNounSub; "That is ", (a) noun, "."; ]; - -[ TestHeldSub; "You are holding ", (a) noun, "."; ]; - -[ NewWaveSub; "That would be foolish."; ]; - -[ FeelSub; print_ret (The) noun, " feels normal."; ]; - -[ ReverseSub from; - from = parent(noun); - move noun to parent(second); - if (from == to) - move second to to; - else - move second to from; - give noun to; - from = to; - give second from; - "You swap ", (the) noun, " and ", (the) second, "."; -]; - -End: The End directive ends the source code. diff --git a/tests/examplefiles/interp.scala b/tests/examplefiles/interp.scala deleted file mode 100644 index 4131b75e..00000000 --- a/tests/examplefiles/interp.scala +++ /dev/null @@ -1,10 +0,0 @@ -val n = 123; -val a = s"n=$n"; -val a2 = s"n=$n''"; -val b = s"""n=$n"""; -val c = f"n=$n%f"; -val d = f"""n=$n%f"""; -val d2 = s"""a""""; -val e = s"abc\u00e9"; -val f = s"a${n}b"; -val g = s"a${n + 1}b"; diff --git a/tests/examplefiles/intro.ik b/tests/examplefiles/intro.ik deleted file mode 100644 index 03fcee39..00000000 --- a/tests/examplefiles/intro.ik +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/ioke - -Ioke = LanguageExperiment with( - goal: :expressiveness, - data: as(code), - code: as(data), - features: [ - :dynamic, - :object_oriented, - :prototype_based, - :homoiconic, - :macros - ], - runtimes:(JVM, CLR), - inspirations: set(Io, Smalltalk, Ruby, Lisp) -) - -hello = method("Every example needs a hello world!", - name, - "hello, #{name}!" println) - -Ioke inspirations select( - features include?(:object_oriented) -) each(x, hello(x name)) diff --git a/tests/examplefiles/ints.php b/tests/examplefiles/ints.php deleted file mode 100644 index 516ab2c8..00000000 --- a/tests/examplefiles/ints.php +++ /dev/null @@ -1,10 +0,0 @@ - - diff --git a/tests/examplefiles/intsyn.fun b/tests/examplefiles/intsyn.fun deleted file mode 100644 index 777b0fdb..00000000 --- a/tests/examplefiles/intsyn.fun +++ /dev/null @@ -1,675 +0,0 @@ -(* Internal Syntax *) -(* Author: Frank Pfenning, Carsten Schuermann *) -(* Modified: Roberto Virga *) - -functor IntSyn (structure Global : GLOBAL) :> INTSYN = -struct - - type cid = int (* Constant identifier *) - type name = string (* Variable name *) - type mid = int (* Structure identifier *) - type csid = int (* CS module identifier *) - - - (* Contexts *) - datatype 'a Ctx = (* Contexts *) - Null (* G ::= . 
*) - | Decl of 'a Ctx * 'a (* | G, D *) - - (* ctxPop (G) => G' - Invariant: G = G',D - *) - fun ctxPop (Decl (G, D)) = G - - exception Error of string (* raised if out of space *) - (* ctxLookup (G, k) = D, kth declaration in G from right to left - Invariant: 1 <= k <= |G|, where |G| is length of G - *) - - fun ctxLookup (Decl (G', D), 1) = D - | ctxLookup (Decl (G', _), k') = ctxLookup (G', k'-1) -(* | ctxLookup (Null, k') = (print ("Looking up k' = " ^ Int.toString k' ^ "\n"); raise Error "Out of Bounce\n")*) - (* ctxLookup (Null, k') should not occur by invariant *) - - (* ctxLength G = |G|, the number of declarations in G *) - fun ctxLength G = - let - fun ctxLength' (Null, n) = n - | ctxLength' (Decl(G, _), n)= ctxLength' (G, n+1) - in - ctxLength' (G, 0) - end - - type FgnExp = exn (* foreign expression representation *) - exception UnexpectedFgnExp of FgnExp - (* raised by a constraint solver - if passed an incorrect arg *) - - type FgnCnstr = exn (* foreign unification constraint - representation *) - exception UnexpectedFgnCnstr of FgnCnstr - (* raised by a constraint solver - if passed an incorrect arg *) - - datatype Depend = (* Dependency information *) - No (* P ::= No *) - | Maybe (* | Maybe *) - | Meta (* | Meta *) - - (* Expressions *) - - datatype Uni = (* Universes: *) - Kind (* L ::= Kind *) - | Type (* | Type *) - - datatype Exp = (* Expressions: *) - Uni of Uni (* U ::= L *) - | Pi of (Dec * Depend) * Exp (* | bPi (D, P). V *) - | Root of Head * Spine (* | C @ S *) - | Redex of Exp * Spine (* | U @ S *) - | Lam of Dec * Exp (* | lam D. U *) - | EVar of Exp option ref * Dec Ctx * Exp * (Cnstr ref) list ref - (* | X : G|-V, Cnstr *) - - | EClo of Exp * Sub (* | U[s] *) - | AVar of Exp option ref (* | A *) - | NVar of int (* | n (linear, fully applied) *) - (* grafting variable *) - - | FgnExp of csid * FgnExp - (* | (foreign expression) *) - - and Head = (* Heads: *) - BVar of int (* H ::= k *) - | Const of cid (* | c *) - | Proj of Block * int (* | #k(b) *) - | Skonst of cid (* | c# *) - | Def of cid (* | d *) - | NSDef of cid (* | d (non strict) *) - | FVar of name * Exp * Sub (* | F[s] *) - | FgnConst of csid * ConDec (* | (foreign constant) *) - - and Spine = (* Spines: *) - Nil (* S ::= Nil *) - | App of Exp * Spine (* | U ; S *) - | SClo of Spine * Sub (* | S[s] *) - - and Sub = (* Explicit substitutions: *) - Shift of int (* s ::= ^n *) - | Dot of Front * Sub (* | Ft.s *) - - and Front = (* Fronts: *) - Idx of int (* Ft ::= k *) - | Exp of Exp (* | U *) - | Axp of Exp (* | U (assignable) *) - | Block of Block (* | _x *) - | Undef (* | _ *) - - and Dec = (* Declarations: *) - Dec of name option * Exp (* D ::= x:V *) - | BDec of name option * (cid * Sub) (* | v:l[s] *) - | ADec of name option * int (* | v[^-d] *) - | NDec of name option - - and Block = (* Blocks: *) - Bidx of int (* b ::= v *) - | LVar of Block option ref * Sub * (cid * Sub) - (* | L(l[^k],t) *) - | Inst of Exp list (* | u1, ..., Un *) - - - (* Constraints *) - - and Cnstr = (* Constraint: *) - Solved (* Cnstr ::= solved *) - | Eqn of Dec Ctx * Exp * Exp (* | G|-(U1 == U2) *) - | FgnCnstr of csid * FgnCnstr (* | (foreign) *) - - and Status = (* Status of a constant: *) - Normal (* inert *) - | Constraint of csid * (Dec Ctx * Spine * int -> Exp option) - (* acts as constraint *) - | Foreign of csid * (Spine -> Exp) (* is converted to foreign *) - - and FgnUnify = (* Result of foreign unify *) - Succeed of FgnUnifyResidual list - (* succeed with a list of residual operations *) - | Fail - - and 
FgnUnifyResidual = (* Residual of foreign unify *) - Assign of Dec Ctx * Exp * Exp * Sub - (* perform the assignment G |- X = U [ss] *) - | Delay of Exp * Cnstr ref - (* delay cnstr, associating it with all the rigid EVars in U *) - - (* Global signature *) - - and ConDec = (* Constant declaration *) - ConDec of string * mid option * int * Status - (* a : K : kind or *) - * Exp * Uni (* c : A : type *) - | ConDef of string * mid option * int (* a = A : K : kind or *) - * Exp * Exp * Uni (* d = M : A : type *) - * Ancestor (* Ancestor info for d or a *) - | AbbrevDef of string * mid option * int - (* a = A : K : kind or *) - * Exp * Exp * Uni (* d = M : A : type *) - | BlockDec of string * mid option (* %block l : SOME G1 PI G2 *) - * Dec Ctx * Dec list - - | BlockDef of string * mid option * cid list - (* %block l = (l1 | ... | ln) *) - - | SkoDec of string * mid option * int (* sa: K : kind or *) - * Exp * Uni (* sc: A : type *) - - and Ancestor = (* Ancestor of d or a *) - Anc of cid option * int * cid option (* head(expand(d)), height, head(expand[height](d)) *) - (* NONE means expands to {x:A}B *) - - datatype StrDec = (* Structure declaration *) - StrDec of string * mid option - - (* Form of constant declaration *) - datatype ConDecForm = - FromCS (* from constraint domain *) - | Ordinary (* ordinary declaration *) - | Clause (* %clause declaration *) - - (* Type abbreviations *) - type dctx = Dec Ctx (* G = . | G,D *) - type eclo = Exp * Sub (* Us = U[s] *) - type bclo = Block * Sub (* Bs = B[s] *) - type cnstr = Cnstr ref - -(* exception Error of string (* raised if out of space *) *) - - - structure FgnExpStd = struct - - structure ToInternal = FgnOpnTable (type arg = unit - type result = Exp) - - structure Map = FgnOpnTable (type arg = Exp -> Exp - type result = Exp) - - structure App = FgnOpnTable (type arg = Exp -> unit - type result = unit) - - structure EqualTo = FgnOpnTable (type arg = Exp - type result = bool) - - structure UnifyWith = FgnOpnTable (type arg = Dec Ctx * Exp - type result = FgnUnify) - - - - fun fold csfe f b = let - val r = ref b - fun g U = r := f (U,!r) - in - App.apply csfe g ; !r - end - - end - - structure FgnCnstrStd = struct - - structure ToInternal = FgnOpnTable (type arg = unit - type result = (Dec Ctx * Exp) list) - - structure Awake = FgnOpnTable (type arg = unit - type result = bool) - - structure Simplify = FgnOpnTable (type arg = unit - type result = bool) - - end - - fun conDecName (ConDec (name, _, _, _, _, _)) = name - | conDecName (ConDef (name, _, _, _, _, _, _)) = name - | conDecName (AbbrevDef (name, _, _, _, _, _)) = name - | conDecName (SkoDec (name, _, _, _, _)) = name - | conDecName (BlockDec (name, _, _, _)) = name - | conDecName (BlockDef (name, _, _)) = name - - fun conDecParent (ConDec (_, parent, _, _, _, _)) = parent - | conDecParent (ConDef (_, parent, _, _, _, _, _)) = parent - | conDecParent (AbbrevDef (_, parent, _, _, _, _)) = parent - | conDecParent (SkoDec (_, parent, _, _, _)) = parent - | conDecParent (BlockDec (_, parent, _, _)) = parent - | conDecParent (BlockDef (_, parent, _)) = parent - - - (* conDecImp (CD) = k - - Invariant: - If CD is either a declaration, definition, abbreviation, or - a Skolem constant - then k stands for the number of implicit elements. 
- *) - fun conDecImp (ConDec (_, _, i, _, _, _)) = i - | conDecImp (ConDef (_, _, i, _, _, _, _)) = i - | conDecImp (AbbrevDef (_, _, i, _, _, _)) = i - | conDecImp (SkoDec (_, _, i, _, _)) = i - | conDecImp (BlockDec (_, _, _, _)) = 0 (* watch out -- carsten *) - - fun conDecStatus (ConDec (_, _, _, status, _, _)) = status - | conDecStatus _ = Normal - - (* conDecType (CD) = V - - Invariant: - If CD is either a declaration, definition, abbreviation, or - a Skolem constant - then V is the respective type - *) - fun conDecType (ConDec (_, _, _, _, V, _)) = V - | conDecType (ConDef (_, _, _, _, V, _, _)) = V - | conDecType (AbbrevDef (_, _, _, _, V, _)) = V - | conDecType (SkoDec (_, _, _, V, _)) = V - - - (* conDecBlock (CD) = (Gsome, Lpi) - - Invariant: - If CD is block definition - then Gsome is the context of some variables - and Lpi is the list of pi variables - *) - fun conDecBlock (BlockDec (_, _, Gsome, Lpi)) = (Gsome, Lpi) - - (* conDecUni (CD) = L - - Invariant: - If CD is either a declaration, definition, abbreviation, or - a Skolem constant - then L is the respective universe - *) - fun conDecUni (ConDec (_, _, _, _, _, L)) = L - | conDecUni (ConDef (_, _, _, _, _, L, _)) = L - | conDecUni (AbbrevDef (_, _, _, _, _, L)) = L - | conDecUni (SkoDec (_, _, _, _, L)) = L - - - fun strDecName (StrDec (name, _)) = name - - fun strDecParent (StrDec (_, parent)) = parent - - local - val maxCid = Global.maxCid - val dummyEntry = ConDec("", NONE, 0, Normal, Uni (Kind), Kind) - val sgnArray = Array.array (maxCid+1, dummyEntry) - : ConDec Array.array - val nextCid = ref(0) - - val maxMid = Global.maxMid - val sgnStructArray = Array.array (maxMid+1, StrDec("", NONE)) - : StrDec Array.array - val nextMid = ref (0) - - in - (* Invariants *) - (* Constant declarations are all well-typed *) - (* Constant declarations are stored in beta-normal form *) - (* All definitions are strict in all their arguments *) - (* If Const(cid) is valid, then sgnArray(cid) = ConDec _ *) - (* If Def(cid) is valid, then sgnArray(cid) = ConDef _ *) - - fun sgnClean (i) = if i >= !nextCid then () - else (Array.update (sgnArray, i, dummyEntry); - sgnClean (i+1)) - - fun sgnReset () = ((* Fri Dec 20 12:04:24 2002 -fp *) - (* this circumvents a space leak *) - sgnClean (0); - nextCid := 0; nextMid := 0) - fun sgnSize () = (!nextCid, !nextMid) - - fun sgnAdd (conDec) = - let - val cid = !nextCid - in - if cid > maxCid - then raise Error ("Global signature size " ^ Int.toString (maxCid+1) ^ " exceeded") - else (Array.update (sgnArray, cid, conDec) ; - nextCid := cid + 1; - cid) - end - - (* 0 <= cid < !nextCid *) - fun sgnLookup (cid) = Array.sub (sgnArray, cid) - - fun sgnApp (f) = - let - fun sgnApp' (cid) = - if cid = !nextCid then () else (f cid; sgnApp' (cid+1)) - in - sgnApp' (0) - end - - fun sgnStructAdd (strDec) = - let - val mid = !nextMid - in - if mid > maxMid - then raise Error ("Global signature size " ^ Int.toString (maxMid+1) ^ " exceeded") - else (Array.update (sgnStructArray, mid, strDec) ; - nextMid := mid + 1; - mid) - end - - (* 0 <= mid < !nextMid *) - fun sgnStructLookup (mid) = Array.sub (sgnStructArray, mid) - - (* A hack used in Flit - jcreed 6/05 *) - fun rename (cid, new) = - let - val newConDec = case sgnLookup cid of - ConDec (n,m,i,s,e,u) => ConDec(new,m,i,s,e,u) - | ConDef (n,m,i,e,e',u,a) => ConDef(new,m,i,e,e',u,a) - | AbbrevDef (n,m,i,e,e',u) => AbbrevDef (new,m,i,e,e',u) - | BlockDec (n,m,d,d') => BlockDec (new,m,d,d') - | SkoDec (n,m,i,e,u) => SkoDec (new,m,i,e,u) - in - Array.update 
(sgnArray, cid, newConDec) - end - - end - - fun constDef (d) = - (case sgnLookup (d) - of ConDef(_, _, _, U,_, _, _) => U - | AbbrevDef (_, _, _, U,_, _) => U) - - fun constType (c) = conDecType (sgnLookup c) - fun constImp (c) = conDecImp (sgnLookup c) - fun constUni (c) = conDecUni (sgnLookup c) - fun constBlock (c) = conDecBlock (sgnLookup c) - - fun constStatus (c) = - (case sgnLookup (c) - of ConDec (_, _, _, status, _, _) => status - | _ => Normal) - - - (* Explicit Substitutions *) - - (* id = ^0 - - Invariant: - G |- id : G id is patsub - *) - val id = Shift(0) - - (* shift = ^1 - - Invariant: - G, V |- ^ : G ^ is patsub - *) - val shift = Shift(1) - - (* invShift = ^-1 = _.^0 - Invariant: - G |- ^-1 : G, V ^-1 is patsub - *) - val invShift = Dot(Undef, id) - - - (* comp (s1, s2) = s' - - Invariant: - If G' |- s1 : G - and G'' |- s2 : G' - then s' = s1 o s2 - and G'' |- s1 o s2 : G - - If s1, s2 patsub - then s' patsub - *) - fun comp (Shift (0), s) = s - (* next line is an optimization *) - (* roughly 15% on standard suite for Twelf 1.1 *) - (* Sat Feb 14 10:15:16 1998 -fp *) - | comp (s, Shift (0)) = s - | comp (Shift (n), Dot (Ft, s)) = comp (Shift (n-1), s) - | comp (Shift (n), Shift (m)) = Shift (n+m) - | comp (Dot (Ft, s), s') = Dot (frontSub (Ft, s'), comp (s, s')) - - (* bvarSub (n, s) = Ft' - - Invariant: - If G |- s : G' G' |- n : V - then Ft' = Ftn if s = Ft1 .. Ftn .. ^k - or Ft' = ^(n+k) if s = Ft1 .. Ftm ^k and m Bidx k' - | Block B => B) - | blockSub (LVar (ref (SOME B), sk, _), s) = - blockSub (B, comp (sk, s)) - (* -fp Sun Dec 1 21:18:30 2002 *) - (* --cs Sun Dec 1 11:25:41 2002 *) - (* Since always . |- t : Gsome, discard s *) - (* where is this needed? *) - (* Thu Dec 6 20:30:26 2001 -fp !!! *) - | blockSub (LVar (r as ref NONE, sk, (l, t)), s) = - LVar(r, comp(sk, s), (l, t)) - (* was: - LVar (r, comp(sk, s), (l, comp (t, s))) - July 22, 2010 -fp -cs - *) - (* comp(^k, s) = ^k' for some k' by invariant *) - | blockSub (L as Inst ULs, s') = Inst (map (fn U => EClo (U, s')) ULs) - (* this should be right but somebody should verify *) - - (* frontSub (Ft, s) = Ft' - - Invariant: - If G |- s : G' G' |- Ft : V - then Ft' = Ft [s] - and G |- Ft' : V [s] - - NOTE: EClo (U, s) might be undefined, so if this is ever - computed eagerly, we must introduce an "Undefined" exception, - raise it in whnf and handle it here so Exp (EClo (U, s)) => Undef - *) - and frontSub (Idx (n), s) = bvarSub (n, s) - | frontSub (Exp (U), s) = Exp (EClo (U, s)) - | frontSub (Undef, s) = Undef - | frontSub (Block (B), s) = Block (blockSub (B, s)) - - (* decSub (x:V, s) = D' - - Invariant: - If G |- s : G' G' |- V : L - then D' = x:V[s] - and G |- V[s] : L - *) - (* First line is an optimization suggested by cs *) - (* D[id] = D *) - (* Sat Feb 14 18:37:44 1998 -fp *) - (* seems to have no statistically significant effect *) - (* undo for now Sat Feb 14 20:22:29 1998 -fp *) - (* - fun decSub (D, Shift(0)) = D - | decSub (Dec (x, V), s) = Dec (x, EClo (V, s)) - *) - fun decSub (Dec (x, V), s) = Dec (x, EClo (V, s)) - | decSub (NDec x, s) = NDec x - | decSub (BDec (n, (l, t)), s) = BDec (n, (l, comp (t, s))) - - (* dot1 (s) = s' - - Invariant: - If G |- s : G' - then s' = 1. (s o ^) - and for all V s.t. 
G' |- V : L - G, V[s] |- s' : G', V - - If s patsub then s' patsub - *) - (* first line is an optimization *) - (* roughly 15% on standard suite for Twelf 1.1 *) - (* Sat Feb 14 10:16:16 1998 -fp *) - fun dot1 (s as Shift (0)) = s - | dot1 s = Dot (Idx(1), comp(s, shift)) - - (* invDot1 (s) = s' - invDot1 (1. s' o ^) = s' - - Invariant: - s = 1 . s' o ^ - If G' |- s' : G - (so G',V[s] |- s : G,V) - *) - fun invDot1 (s) = comp (comp(shift, s), invShift) - - - (* Declaration Contexts *) - - (* ctxDec (G, k) = x:V - Invariant: - If |G| >= k, where |G| is size of G, - then G |- k : V and G |- V : L - *) - fun ctxDec (G, k) = - let (* ctxDec' (G'', k') = x:V - where G |- ^(k-k') : G'', 1 <= k' <= k - *) - fun ctxDec' (Decl (G', Dec (x, V')), 1) = Dec (x, EClo (V', Shift (k))) - | ctxDec' (Decl (G', BDec (n, (l, s))), 1) = BDec (n, (l, comp (s, Shift (k)))) - | ctxDec' (Decl (G', _), k') = ctxDec' (G', k'-1) - (* ctxDec' (Null, k') should not occur by invariant *) - in - ctxDec' (G, k) - end - - (* blockDec (G, v, i) = V - - Invariant: - If G (v) = l[s] - and Sigma (l) = SOME Gsome BLOCK Lblock - and G |- s : Gsome - then G |- pi (v, i) : V - *) - - fun blockDec (G, v as (Bidx k), i) = - let - val BDec (_, (l, s)) = ctxDec (G, k) - (* G |- s : Gsome *) - val (Gsome, Lblock) = conDecBlock (sgnLookup l) - fun blockDec' (t, D :: L, 1, j) = decSub (D, t) - | blockDec' (t, _ :: L, n, j) = - blockDec' (Dot (Exp (Root (Proj (v, j), Nil)), t), - L, n-1, j+1) - in - blockDec' (s, Lblock, i, 1) - end - - - (* EVar related functions *) - - (* newEVar (G, V) = newEVarCnstr (G, V, nil) *) - fun newEVar (G, V) = EVar(ref NONE, G, V, ref nil) - - (* newAVar G = new AVar (assignable variable) *) - (* AVars carry no type, ctx, or cnstr *) - fun newAVar () = AVar(ref NONE) - - (* newTypeVar (G) = X, X new - where G |- X : type - *) - fun newTypeVar (G) = EVar(ref NONE, G, Uni(Type), ref nil) - - (* newLVar (l, s) = (l[s]) *) - fun newLVar (sk, (cid, t)) = LVar (ref NONE, sk, (cid, t)) - - (* Definition related functions *) - (* headOpt (U) = SOME(H) or NONE, U should be strict, normal *) - fun headOpt (Root (H, _)) = SOME(H) - | headOpt (Lam (_, U)) = headOpt U - | headOpt _ = NONE - - fun ancestor' (NONE) = Anc(NONE, 0, NONE) - | ancestor' (SOME(Const(c))) = Anc(SOME(c), 1, SOME(c)) - | ancestor' (SOME(Def(d))) = - (case sgnLookup(d) - of ConDef(_, _, _, _, _, _, Anc(_, height, cOpt)) - => Anc(SOME(d), height+1, cOpt)) - | ancestor' (SOME _) = (* FgnConst possible, BVar impossible by strictness *) - Anc(NONE, 0, NONE) - (* ancestor(U) = ancestor info for d = U *) - fun ancestor (U) = ancestor' (headOpt U) - - (* defAncestor(d) = ancestor of d, d must be defined *) - fun defAncestor (d) = - (case sgnLookup(d) - of ConDef(_, _, _, _, _, _, anc) => anc) - - (* Type related functions *) - - (* targetHeadOpt (V) = SOME(H) or NONE - where H is the head of the atomic target type of V, - NONE if V is a kind or object or have variable type. - Does not expand type definitions. - *) - (* should there possibly be a FgnConst case? 
also targetFamOpt -kw *) - fun targetHeadOpt (Root (H, _)) = SOME(H) - | targetHeadOpt (Pi(_, V)) = targetHeadOpt V - | targetHeadOpt (Redex (V, S)) = targetHeadOpt V - | targetHeadOpt (Lam (_, V)) = targetHeadOpt V - | targetHeadOpt (EVar (ref (SOME(V)),_,_,_)) = targetHeadOpt V - | targetHeadOpt (EClo (V, s)) = targetHeadOpt V - | targetHeadOpt _ = NONE - (* Root(Bvar _, _), Root(FVar _, _), Root(FgnConst _, _), - EVar(ref NONE,..), Uni, FgnExp _ - *) - (* Root(Skonst _, _) can't occur *) - (* targetHead (A) = a - as in targetHeadOpt, except V must be a valid type - *) - fun targetHead (A) = valOf (targetHeadOpt A) - - (* targetFamOpt (V) = SOME(cid) or NONE - where cid is the type family of the atomic target type of V, - NONE if V is a kind or object or have variable type. - Does expand type definitions. - *) - fun targetFamOpt (Root (Const(cid), _)) = SOME(cid) - | targetFamOpt (Pi(_, V)) = targetFamOpt V - | targetFamOpt (Root (Def(cid), _)) = targetFamOpt (constDef cid) - | targetFamOpt (Redex (V, S)) = targetFamOpt V - | targetFamOpt (Lam (_, V)) = targetFamOpt V - | targetFamOpt (EVar (ref (SOME(V)),_,_,_)) = targetFamOpt V - | targetFamOpt (EClo (V, s)) = targetFamOpt V - | targetFamOpt _ = NONE - (* Root(Bvar _, _), Root(FVar _, _), Root(FgnConst _, _), - EVar(ref NONE,..), Uni, FgnExp _ - *) - (* Root(Skonst _, _) can't occur *) - (* targetFam (A) = a - as in targetFamOpt, except V must be a valid type - *) - fun targetFam (A) = valOf (targetFamOpt A) - -end; (* functor IntSyn *) - -structure IntSyn :> INTSYN = - IntSyn (structure Global = Global); diff --git a/tests/examplefiles/intsyn.sig b/tests/examplefiles/intsyn.sig deleted file mode 100644 index ea505362..00000000 --- a/tests/examplefiles/intsyn.sig +++ /dev/null @@ -1,286 +0,0 @@ -(* Internal Syntax *) -(* Author: Frank Pfenning, Carsten Schuermann *) -(* Modified: Roberto Virga *) - -signature INTSYN = -sig - - type cid = int (* Constant identifier *) - type mid = int (* Structure identifier *) - type csid = int (* CS module identifier *) - - - type FgnExp = exn (* foreign expression representation *) - exception UnexpectedFgnExp of FgnExp - (* raised by a constraint solver - if passed an incorrect arg *) - type FgnCnstr = exn (* foreign constraint representation *) - exception UnexpectedFgnCnstr of FgnCnstr - (* raised by a constraint solver - if passed an incorrect arg *) - - (* Contexts *) - - datatype 'a Ctx = (* Contexts *) - Null (* G ::= . *) - | Decl of 'a Ctx * 'a (* | G, D *) - - val ctxPop : 'a Ctx -> 'a Ctx - val ctxLookup: 'a Ctx * int -> 'a - val ctxLength: 'a Ctx -> int - - datatype Depend = (* Dependency information *) - No (* P ::= No *) - | Maybe (* | Maybe *) - | Meta (* | Meta *) - - (* expressions *) - - datatype Uni = (* Universes: *) - Kind (* L ::= Kind *) - | Type (* | Type *) - - datatype Exp = (* Expressions: *) - Uni of Uni (* U ::= L *) - | Pi of (Dec * Depend) * Exp (* | Pi (D, P). V *) - | Root of Head * Spine (* | H @ S *) - | Redex of Exp * Spine (* | U @ S *) - | Lam of Dec * Exp (* | lam D. 
U *) - | EVar of Exp option ref * Dec Ctx * Exp * (Cnstr ref) list ref - (* | X : G|-V, Cnstr *) - | EClo of Exp * Sub (* | U[s] *) - | AVar of Exp option ref (* | A *) - - | FgnExp of csid * FgnExp (* | (foreign expression) *) - - | NVar of int (* | n (linear, - fully applied variable - used in indexing *) - - and Head = (* Head: *) - BVar of int (* H ::= k *) - | Const of cid (* | c *) - | Proj of Block * int (* | #k(b) *) - | Skonst of cid (* | c# *) - | Def of cid (* | d (strict) *) - | NSDef of cid (* | d (non strict) *) - | FVar of string * Exp * Sub (* | F[s] *) - | FgnConst of csid * ConDec (* | (foreign constant) *) - - and Spine = (* Spines: *) - Nil (* S ::= Nil *) - | App of Exp * Spine (* | U ; S *) - | SClo of Spine * Sub (* | S[s] *) - - and Sub = (* Explicit substitutions: *) - Shift of int (* s ::= ^n *) - | Dot of Front * Sub (* | Ft.s *) - - and Front = (* Fronts: *) - Idx of int (* Ft ::= k *) - | Exp of Exp (* | U *) - | Axp of Exp (* | U *) - | Block of Block (* | _x *) - | Undef (* | _ *) - - and Dec = (* Declarations: *) - Dec of string option * Exp (* D ::= x:V *) - | BDec of string option * (cid * Sub) (* | v:l[s] *) - | ADec of string option * int (* | v[^-d] *) - | NDec of string option - - and Block = (* Blocks: *) - Bidx of int (* b ::= v *) - | LVar of Block option ref * Sub * (cid * Sub) - (* | L(l[^k],t) *) - | Inst of Exp list (* | U1, ..., Un *) - (* It would be better to consider having projections count - like substitutions, then we could have Inst of Sub here, - which would simplify a lot of things. - - I suggest however to wait until the next big overhaul - of the system -- cs *) - - -(* | BClo of Block * Sub (* | b[s] *) *) - - (* constraints *) - - and Cnstr = (* Constraint: *) - Solved (* Cnstr ::= solved *) - | Eqn of Dec Ctx * Exp * Exp (* | G|-(U1 == U2) *) - | FgnCnstr of csid * FgnCnstr (* | (foreign) *) - - and Status = (* Status of a constant: *) - Normal (* inert *) - | Constraint of csid * (Dec Ctx * Spine * int -> Exp option) - (* acts as constraint *) - | Foreign of csid * (Spine -> Exp) (* is converted to foreign *) - - and FgnUnify = (* Result of foreign unify *) - Succeed of FgnUnifyResidual list - (* succeed with a list of residual operations *) - | Fail - - and FgnUnifyResidual = - Assign of Dec Ctx * Exp * Exp * Sub - (* perform the assignment G |- X = U [ss] *) - | Delay of Exp * Cnstr ref - (* delay cnstr, associating it with all the rigid EVars in U *) - - (* Global signature *) - - and ConDec = (* Constant declaration *) - ConDec of string * mid option * int * Status - (* a : K : kind or *) - * Exp * Uni (* c : A : type *) - | ConDef of string * mid option * int (* a = A : K : kind or *) - * Exp * Exp * Uni (* d = M : A : type *) - * Ancestor (* Ancestor info for d or a *) - | AbbrevDef of string * mid option * int - (* a = A : K : kind or *) - * Exp * Exp * Uni (* d = M : A : type *) - | BlockDec of string * mid option (* %block l : SOME G1 PI G2 *) - * Dec Ctx * Dec list - | BlockDef of string * mid option * cid list - (* %block l = (l1 | ... 
| ln) *) - | SkoDec of string * mid option * int (* sa: K : kind or *) - * Exp * Uni (* sc: A : type *) - - and Ancestor = (* Ancestor of d or a *) - Anc of cid option * int * cid option (* head(expand(d)), height, head(expand[height](d)) *) - (* NONE means expands to {x:A}B *) - - datatype StrDec = (* Structure declaration *) - StrDec of string * mid option - - (* Form of constant declaration *) - datatype ConDecForm = - FromCS (* from constraint domain *) - | Ordinary (* ordinary declaration *) - | Clause (* %clause declaration *) - - (* Type abbreviations *) - type dctx = Dec Ctx (* G = . | G,D *) - type eclo = Exp * Sub (* Us = U[s] *) - type bclo = Block * Sub (* Bs = B[s] *) - type cnstr = Cnstr ref - - exception Error of string (* raised if out of space *) - - (* standard operations on foreign expressions *) - structure FgnExpStd : sig - (* convert to internal syntax *) - structure ToInternal : FGN_OPN where type arg = unit - where type result = Exp - - (* apply function to subterms *) - structure Map : FGN_OPN where type arg = Exp -> Exp - where type result = Exp - - (* apply function to subterms, for effect *) - structure App : FGN_OPN where type arg = Exp -> unit - where type result = unit - - (* test for equality *) - structure EqualTo : FGN_OPN where type arg = Exp - where type result = bool - - (* unify with another term *) - structure UnifyWith : FGN_OPN where type arg = Dec Ctx * Exp - where type result = FgnUnify - - (* fold a function over the subterms *) - val fold : (csid * FgnExp) -> (Exp * 'a -> 'a) -> 'a -> 'a - end - - (* standard operations on foreign constraints *) - structure FgnCnstrStd : sig - (* convert to internal syntax *) - structure ToInternal : FGN_OPN where type arg = unit - where type result = (Dec Ctx * Exp) list - - (* awake *) - structure Awake : FGN_OPN where type arg = unit - where type result = bool - - (* simplify *) - structure Simplify : FGN_OPN where type arg = unit - where type result = bool - end - - val conDecName : ConDec -> string - val conDecParent : ConDec -> mid option - val conDecImp : ConDec -> int - val conDecStatus : ConDec -> Status - val conDecType : ConDec -> Exp - val conDecBlock : ConDec -> dctx * Dec list - val conDecUni : ConDec -> Uni - - val strDecName : StrDec -> string - val strDecParent : StrDec -> mid option - - val sgnReset : unit -> unit - val sgnSize : unit -> cid * mid - - val sgnAdd : ConDec -> cid - val sgnLookup: cid -> ConDec - val sgnApp : (cid -> unit) -> unit - - val sgnStructAdd : StrDec -> mid - val sgnStructLookup : mid -> StrDec - - val constType : cid -> Exp (* type of c or d *) - val constDef : cid -> Exp (* definition of d *) - val constImp : cid -> int - val constStatus : cid -> Status - val constUni : cid -> Uni - val constBlock : cid -> dctx * Dec list - - (* Declaration Contexts *) - - val ctxDec : dctx * int -> Dec (* get variable declaration *) - val blockDec : dctx * Block * int -> Dec - - (* Explicit substitutions *) - - val id : Sub (* id *) - val shift : Sub (* ^ *) - val invShift : Sub (* ^-1 *) - - val bvarSub : int * Sub -> Front (* k[s] *) - val frontSub : Front * Sub -> Front (* H[s] *) - val decSub : Dec * Sub -> Dec (* x:V[s] *) - val blockSub : Block * Sub -> Block (* B[s] *) - - val comp : Sub * Sub -> Sub (* s o s' *) - val dot1 : Sub -> Sub (* 1 . 
(s o ^) *) - val invDot1 : Sub -> Sub (* (^ o s) o ^-1) *) - - (* EVar related functions *) - - val newEVar : dctx * Exp -> Exp (* creates X:G|-V, [] *) - val newAVar : unit -> Exp (* creates A (bare) *) - val newTypeVar : dctx -> Exp (* creates X:G|-type, [] *) - val newLVar : Sub * (cid * Sub) -> Block - (* creates B:(l[^k],t) *) - - (* Definition related functions *) - val headOpt : Exp -> Head option - val ancestor : Exp -> Ancestor - val defAncestor : cid -> Ancestor - - (* Type related functions *) - - (* Not expanding type definitions *) - val targetHeadOpt : Exp -> Head option (* target type family or NONE *) - val targetHead : Exp -> Head (* target type family *) - - (* Expanding type definitions *) - val targetFamOpt : Exp -> cid option (* target type family or NONE *) - val targetFam : Exp -> cid (* target type family *) - - (* Used in Flit *) - val rename : cid * string -> unit - -end; (* signature INTSYN *) diff --git a/tests/examplefiles/irb_heredoc b/tests/examplefiles/irb_heredoc deleted file mode 100644 index 3dc205e3..00000000 --- a/tests/examplefiles/irb_heredoc +++ /dev/null @@ -1,8 +0,0 @@ -irb(main):001:0> puts < nil -irb(main):005:0> diff --git a/tests/examplefiles/irc.lsp b/tests/examplefiles/irc.lsp deleted file mode 100755 index 6f45976a..00000000 --- a/tests/examplefiles/irc.lsp +++ /dev/null @@ -1,214 +0,0 @@ -#!/usr/bin/env newlisp - -;; @module IRC -;; @description a basic irc library -;; @version early alpha! 0.1 2011-10-31 14:21:26 -;; @author cormullion -;; Usage: -;; (IRC:init "newlithper") ; a username/nick (not that one obviously :-) -;; (IRC:connect "irc.freenode.net" 6667) ; irc/server -;; (IRC:join-channel {#newlisp}) ; join a room -;; either (IRC:read-irc-loop) ; loop - monitor only, no input -;; or (IRC:session) ; a command-line session, end with /QUIT - -(context 'IRC) - (define Inickname) - (define Ichannels) - (define Iserver) - (define Iconnected) - (define Icallbacks '()) - (define Idle-time 400) ; seconds - (define Itime-stamp) ; time since last message was processed - -(define (register-callback callback-name callback-function) - (println {registering callback for } callback-name { : } (sym (term callback-function) (prefix callback-function))) - (push (list callback-name (sym (term callback-function) (prefix callback-function))) Icallbacks)) - -(define (do-callback callback-name data) - (when (set 'func (lookup callback-name Icallbacks)) ; find first callback - (if-not (catch (apply func (list data)) 'error) - (println {error in callback } callback-name {: } error)))) - -(define (do-callbacks callback-name data) - (dolist (rf (ref-all callback-name Icallbacks)) - (set 'callback-entry (Icallbacks (first rf))) - (when (set 'func (last callback-entry)) - (if-not (catch (apply func (list data)) 'error) - (println {error in callback } callback-name {: } error))))) - -(define (init str) - (set 'Inickname str) - (set 'Iconnected nil) - (set 'Ichannels '()) - (set 'Itime-stamp (time-of-day))) - -(define (connect server port) - (set 'Iserver (net-connect server port)) - (net-send Iserver (format "USER %s %s %s :%s\r\n" Inickname Inickname Inickname Inickname)) - (net-send Iserver (format "NICK %s \r\n" Inickname)) - (set 'Iconnected true) - (do-callbacks "connect" (list (list "server" server) (list "port" port)))) - -(define (identify password) - (net-send Iserver (format "PRIVMSG nickserv :identify %s\r\n" password))) - -(define (join-channel channel) - (when (net-send Iserver (format "JOIN %s \r\n" channel)) - (push channel Ichannels) - 
(do-callbacks "join-channel" (list (list "channel" channel) (list "nickname" Inickname))))) - -(define (part chan) - (if-not (empty? chan) - ; leave specified - (begin - (net-send Iserver (format "PART %s\r\n" chan)) - (replace channel Ichannels) - (do-callbacks "part" (list (list "channel" channel)))) - ; leave all - (begin - (dolist (channel Ichannels) - (net-send Iserver (format "PART %s\r\n" channel)) - (replace channel Ichannels) - (do-callbacks "part" (list (list "channel" channel))))))) - -(define (do-quit message) - (do-callbacks "quit" '()) ; chance to do stuff before quit... - (net-send Iserver (format "QUIT :%s\r\n" message)) - (sleep 1000) - (set 'Ichannels '()) - (close Iserver) - (set 'Iconnected nil)) - -(define (privmsg user message) - (net-send Iserver (format "PRIVMSG %s :%s\r\n" user message))) - -(define (notice user message) - (net-send Iserver (format "NOTICE %s :%s\r\n" user message))) - -(define (send-to-server message (channel nil)) - (cond - ((starts-with message {/}) ; default command character - (set 'the-message (replace "^/" (copy message) {} 0)) ; keep original - (net-send Iserver (format "%s \r\n" the-message)) ; send it - ; do a quit - (if (starts-with (lower-case the-message) "quit") - (do-quit { enough}))) - (true - (if (nil? channel) - ; say to all channels - (dolist (c Ichannels) - (net-send Iserver (format "PRIVMSG %s :%s\r\n" c message))) - ; say to specified channel - (if (find channel Ichannels) - (net-send Iserver (format "PRIVMSG %s :%s\r\n" channel message)))))) - (do-callbacks "send-to-server" (list (list "channel" channel) (list "message" message)))) - -(define (process-command sender command text) - (cond - ((= sender "PING") - (net-send Iserver (format "PONG %s\r\n" command))) - ((or (= command "NOTICE") (= command "PRIVMSG")) - (process-message sender command text)) - ((= command "JOIN") - (set 'username (first (clean empty? (parse sender {!|:} 0)))) - (set 'channel (last (clean empty? (parse sender {!|:} 0)))) - (println {username } username { joined } channel) - (do-callbacks "join" (list (list "channel" channel) (list "username" username)))) - (true - nil))) - -(define (process-message sender command text) - (let ((username {} target {} message {})) - (set 'username (first (clean empty? (parse sender {!|:} 0)))) - (set 'target (trim (first (clean empty? (parse text {!|:} 0))))) - (set 'message (slice text (+ (find {:} text) 1))) - (cond - ((starts-with message "\001") - (process-ctcp username target message)) - ((find target Ichannels) - (cond - ((= command {PRIVMSG}) - (do-callbacks "channel-message" (list (list "channel" target) (list "username" username) (list "message" message)))) - ((= command {NOTICE}) - (do-callbacks "channel-notice" (list (list "channel" target) (list "username" username) (list "message" message)))))) - ((= target Inickname) - (cond - ((= command {PRIVMSG}) - (do-callbacks "private-message" (list (list "username" username) (list "message" message)))) - ((= command {NOTICE}) - (do-callbacks "private-notice" (list (list "username" username) (list "message" message)))))) - (true - nil)))) - -(define (process-ctcp username target message) - (cond - ((starts-with message "\001VERSION\001") - (net-send Iserver (format "NOTICE %s :\001VERSION %s\001\r\n" username version))) - ((starts-with message "\001PING") - (set 'data (first (rest (clean empty? 
(parse message { } 0))))) - (set 'data (trim data "\001" "\001")) - (net-send Iserver (format "NOTICE %s :\001PING %s\001\r\n" username data))) - ((starts-with message "\001ACTION") - (set 'data (first (rest (clean empty? (parse message { } 0))))) - (set 'data (join data { })) - (set 'data (trim data "\001" "\001")) - (if (find target Ichannels) - (do-callbacks "channel-action" (list (list "username" username) (list "message" message)))) - (if (= target Inickname) - (do-callbacks "private-action" (list (list "username" username) (list "message" message))))) - ((starts-with message "\001TIME\001") - (net-send Iserver (format "NOTICE %s:\001TIME :%s\001\r\n" username (date)))))) - -(define (parse-buffer raw-buffer) - (let ((messages (clean empty? (parse raw-buffer "\r\n" 0))) - (sender {} command {} text {})) - ; check for elapsed time since last activity - (when (> (sub (time-of-day) Itime-stamp) (mul Idle-time 1000)) - (do-callbacks "idle-event") - (set 'Itime-stamp (time-of-day))) - (dolist (message messages) - (set 'message-parts (parse message { })) - (unless (empty? message-parts) - (set 'sender (first message-parts)) - (catch (set 'command (first (rest message-parts))) 'error) - (catch (set 'text (join (rest (rest message-parts)) { })) 'error)) - (process-command sender command text)))) - -(define (read-irc) - (let ((buffer {})) - (when (!= (net-peek Iserver) 0) - (net-receive Iserver buffer 8192 "\n") - (unless (empty? buffer) - (parse-buffer buffer))))) - -(define (read-irc-loop) ; monitoring - (let ((buffer {})) - (while Iconnected - (read-irc) - (sleep 1000)))) - -(define (print-raw-message data) ; example of using a callback - (set 'raw-data (lookup "message" data)) - (set 'channel (lookup "channel" data)) - (set 'message-text raw-data) - (println (date (date-value) 0 {%H:%M:%S }) username {> } message-text)) - -(define (print-outgoing-message data) - (set 'raw-data (lookup "message" data)) - (set 'channel (lookup "channel" data)) - (set 'message-text raw-data) - (println (date (date-value) 0 {%H:%M:%S }) Inickname {> } message-text)) - -(define (session); interactive terminal - ; must add callbacks to display messages - (register-callback "channel-message" 'print-raw-message) - (register-callback "send-to-server" 'print-outgoing-message) - (while Iconnected - (while (zero? (peek 0)) - (read-irc)) - (send-to-server (string (read-line 0)))) - (println {finished session } (date)) - (exit)) - -; end of IRC code - diff --git a/tests/examplefiles/java.properties b/tests/examplefiles/java.properties deleted file mode 100644 index 72ad0f96..00000000 --- a/tests/examplefiles/java.properties +++ /dev/null @@ -1,16 +0,0 @@ -foo = bar -foo: bar -foo.oof: \ - bar=baz; \ - asdf - -// comment -# comment -; comment - -x:a\ -b -x: a \ - b - -x = \ diff --git a/tests/examplefiles/jbst_example1.jbst b/tests/examplefiles/jbst_example1.jbst deleted file mode 100644 index 0e7d014f..00000000 --- a/tests/examplefiles/jbst_example1.jbst +++ /dev/null @@ -1,28 +0,0 @@ -<%@ Control Name="MyApp.MyJbstControl" Language="JavaScript" %> - - - -<%! 
- /* initialization code block, executed only once as control is loaded */ - /* alternate syntax to script block above */ - this.myInitTime = this.generateValue(); -%> - -<% - /* data binding code block, executed each time as control is data bound */ - this.myBindTime = this.generateValue(); -%> - -<%-- JBST Comment --%> -<%= this.myBindTime /* data binding expression */ %> -<%= this.myInitTime /* data binding expression */ %> - - -<%$ Resources: localizationKey %><%-- JBST globalization--%> \ No newline at end of file diff --git a/tests/examplefiles/jbst_example2.jbst b/tests/examplefiles/jbst_example2.jbst deleted file mode 100644 index 2b5e0489..00000000 --- a/tests/examplefiles/jbst_example2.jbst +++ /dev/null @@ -1,45 +0,0 @@ -<%@ Control Name="Foo.MyZebraList" Language="JavaScript" %> - - - -
-

<%= this.data.title %> as of <%= this.formatTime(this.data.timestamp) %>!

-

<%= this.data.description %>

-
    - - - - -
  • - <%= this.data.label %> (<%= this.index+1 %> of <%= this.count %>) -
  • -
    - -
-
\ No newline at end of file diff --git a/tests/examplefiles/jinjadesignerdoc.rst b/tests/examplefiles/jinjadesignerdoc.rst deleted file mode 100644 index b4b6c44b..00000000 --- a/tests/examplefiles/jinjadesignerdoc.rst +++ /dev/null @@ -1,713 +0,0 @@ -====================== -Designer Documentation -====================== - -This part of the Jinja documentation is meant for template designers. - -Basics -====== - -The Jinja template language is designed to strike a balance between content -and application logic. Nevertheless, you can use a Python-like statement -language. You don't have to know how Python works to create Jinja templates, -but if you know it you can use some additional statements you may know from -Python. - -Here is a small example template: - -.. sourcecode:: html+jinja - - - - - My Webpage - - - - -

My Webpage

- {{ variable }} - - - -This covers the default settings. The application developer might have changed -the syntax from ``{% foo %}`` to ``<% foo %>`` or something similar. This -documentation just covers the default values. - -A variable looks like ``{{ foobar }}`` where foobar is the variable name. Inside -of statements (``{% some content here %}``) variables are just normal names -without the braces around them. In fact ``{{ foobar }}`` is just an alias for -the statement ``{% print foobar %}``. - -Variables come from the context provided by the application. Normally there -should be documentation describing the context contents, but if you want to know -the content of the current context, you can add this to your template: - -.. sourcecode:: html+jinja - -
{{ debug()|e }}
- -A context isn't flat, which means that each variable can have subvariables, as long -as it is representable as a Python data structure. You can access attributes of -a variable using the dot and bracket operators. The following examples show -this: - -.. sourcecode:: jinja - - {{ user.username }} - is the same as - {{ user['username'] }} - you can also use a variable to access an attribute: - {{ users[current_user].username }} - If you have numerical indices you have to use the [] syntax: - {{ users[0].username }} - -Filters -======= - -In the examples above you might have noticed the pipe symbols. Pipe symbols tell -the engine that it has to apply a filter on the variable. Here is a small example: - -.. sourcecode:: jinja - - {{ variable|replace('foo', 'bar')|escape }} - -If you want, you can also put whitespace between the filters. - -This will look for a variable `variable`, pass it to the filter `replace` -with the arguments ``'foo'`` and ``'bar'``, and pass the result to the filter -`escape` that automatically XML-escapes the value. The `e` filter is an alias for -`escape`. Here is the complete list of supported filters: - -[[list_of_filters]] - -.. admonition:: note - - Filters have a pretty low priority. If you want to add filtered values - you have to put them into parentheses. The same applies if you want to access - attributes: - - .. sourcecode:: jinja - - correct: - {{ (foo|filter) + (bar|filter) }} - wrong: - {{ foo|filter + bar|filter }} - - correct: - {{ (foo|filter).attribute }} - wrong: - {{ foo|filter.attribute }} - -Tests -===== - -You can use the `is` operator to perform tests on a value: - -.. sourcecode:: jinja - - {{ 42 is numeric }} -> true - {{ "foobar" is numeric }} -> false - {{ 'FOO' is upper }} -> true - -These tests are especially useful when used in `if` conditions. - -[[list_of_tests]] - -Global Functions -================ - -Test functions and filter functions live in their own namespace; global -functions do not. They behave like normal objects in the context. Besides the -functions added by the application or framework, there are two functions -available by default: - -`range` - - Works like the Python `range function`_, except that it doesn't support - ranges greater than ``1000000``. - -`debug` - - Function that outputs the contents of the context. - -Loops -===== - -To iterate over a sequence, you can use the `for` loop. It basically looks like a -normal Python `for` loop and works pretty much the same: - -.. sourcecode:: html+jinja - -

Members

-
    - {% for user in users %} -
  • {{ loop.index }} / {{ loop.length }} - {{ user.username|escape }}
  • - {% else %} -
  • no users found
  • - {% endfor %} -
- -*Important* Contrary to Python is the optional ``else`` block only -executed if there was no iteration because the sequence was empty. - -Inside of a `for` loop block you can access some special variables: - -+----------------------+----------------------------------------+ -| Variable | Description | -+======================+========================================+ -| `loop.index` | The current iteration of the loop. | -+----------------------+----------------------------------------+ -| `loop.index0` | The current iteration of the loop, | -| | starting counting by 0. | -+----------------------+----------------------------------------+ -| `loop.revindex` | The number of iterations from the end | -| | of the loop. | -+----------------------+----------------------------------------+ -| `loop.revindex0` | The number of iterations from the end | -| | of the loop, starting counting by 0. | -+----------------------+----------------------------------------+ -| `loop.first` | True if first iteration. | -+----------------------+----------------------------------------+ -| `loop.last` | True if last iteration. | -+----------------------+----------------------------------------+ -| `loop.even` | True if current iteration is even. | -+----------------------+----------------------------------------+ -| `loop.odd` | True if current iteration is odd. | -+----------------------+----------------------------------------+ -| `loop.length` | Total number of items in the sequence. | -+----------------------+----------------------------------------+ -| `loop.parent` | The context of the parent loop. | -+----------------------+----------------------------------------+ - -Loops also support recursion. Let's assume you have a sitemap where each item -might have a number of child items. A template for that could look like this: - -.. sourcecode:: html+jinja - -

Sitemap -
    - {% for item in sitemap recursive %} -
  • {{ item.title|e }} - {% if item.children %}
      {{ loop(item.children) }}
    {% endif %}
  • - {% endfor %} -
- -What happens here? Basically the first thing that is different to a normal -loop is the additional ``recursive`` modifier in the `for`-loop declaration. -It tells the template engine that we want recursion. If recursion is enabled -the special `loop` variable is callable. If you call it with a sequence it will -automatically render the loop at that position with the new sequence as argument. - -Cycling -======= - -Sometimes you might want to have different text snippets for each row in a list, -for example to have alternating row colors. You can easily do this by using the -``{% cycle %}`` tag: - -.. sourcecode:: html+jinja - -
    - {% for message in messages %} -
  • {{ message|e }}
  • - {% endfor %} -
- -Each time Jinja encounters a `cycle` tag it will cycle through the list -of given items and return the next one. If you pass it one item jinja assumes -that this item is a sequence from the context and uses this: - -.. sourcecode:: html+jinja - -
  • ...
  • - -Conditions -========== - -Jinja supports Python-like `if` / `elif` / `else` constructs: - -.. sourcecode:: jinja - - {% if user.active %} - user {{ user.name|e }} is active. - {% elif user.deleted %} - user {{ user.name|e }} was deleted some time ago. - {% else %} - i don't know what's wrong with {{ user.username|e }} - {% endif %} - -If the user is active the first block is rendered. If not and the user was -deleted the second one, in all other cases the third one. - -You can also use comparison operators: - -.. sourcecode:: html+jinja - - {% if amount < 0 %} - {{ amount }} - {% else %} - {{ amount }} - {% endif %} - -.. admonition:: Note - - Of course you can use `or` / `and` and parentheses to create more complex - conditions, but usually the logic is already handled in the application and - you don't have to create such complex constructs in the template code. However - in some situations it might be a good thing to have the abilities to create - them. - -Operators -========= - -Inside ``{{ variable }}`` blocks, `if` conditions and many other parts you can -can use expressions. In expressions you can use any of the following operators: - - ======= =================================================================== - ``+`` add the right operand to the left one. - ``{{ 1 + 2 }}`` would return ``3``. - ``-`` subtract the right operand from the left one. - ``{{ 1 - 1 }}`` would return ``0``. - ``/`` divide the left operand by the right one. - ``{{ 1 / 2 }}`` would return ``0.5``. - ``*`` multiply the left operand with the right one. - ``{{ 2 * 2 }}`` would return ``4``. - ``**`` raise the left operand to the power of the right - operand. ``{{ 2**3 }}`` would return ``8``. - ``in`` perform sequence membership test. ``{{ 1 in [1,2,3] }}`` would - return true. - ``is`` perform a test on the value. See the section about - tests for more information. - ``|`` apply a filter on the value. See the section about - filters for more information. - ``and`` return true if the left and the right operand is true. - ``or`` return true if the left or the right operand is true. - ``not`` negate a statement (see below) - ``()`` call a callable: ``{{ user.get_username() }}``. Inside of the - parentheses you can use variables: ``{{ user.get(username) }}``. - ======= =================================================================== - -Note that there is no support for any bit operations or something similar. - -* special note regarding `not`: The `is` and `in` operators support negation - using an infix notation too: ``foo is not bar`` and ``foo not in bar`` - instead of ``not foo is bar`` and ``not foo in bar``. All other expressions - require a prefix notation: ``not (foo and bar)``. - -Boolean Values -============== - -In If-Conditions Jinja performs a boolean check. All empty values (eg: empty -lists ``[]``, empty dicts ``{}`` etc) evaluate to `false`. Numbers that are -equal to `0`/`0.00` are considered `false` too. The boolean value of other -objects depends on the behavior the application developer gave it. Usually -items are `true`. - -Here some examples that should explain it: - -.. sourcecode:: jinja - - {% if [] %} - will always be false because it's an empty list - - {% if {} %} - false too. - - {% if ['foo'] %} - this is true. Because the list is not empty. - - {% if "foobar" %} - this is also true because the string is not empty. - -Slicing -======= - -Some objects support slicing operations. For example lists: - -.. 
sourcecode:: jinja - - {% for item in items[:5] %} - This will only iterate over the first 5 items of the list - - {% for item in items[5:10] %} - This will only iterate from item 5 to 10. - - {% for item in items[:10:2] %} - This will only yield items from start to ten and only returning - even items. - -For more information about slicing, have a look at the `slicing chapter`_ -in the "Dive into Python" e-book. - -Macros -====== - -If you want to use a partial template in more than one place, you might want to -create a macro from it: - -.. sourcecode:: html+jinja - - {% macro show_user user %} -

    {{ user.name|e }}

    -
    - {{ user.description }} -
    - {% endmacro %} - -Now you can use it from everywhere in the code by passing it an item: - -.. sourcecode:: jinja - - {% for user in users %} - {{ show_user(user) }} - {% endfor %} - -You can also specify more than one value: - -.. sourcecode:: html+jinja - - {% macro show_dialog title, text %} -
    -

    {{ title|e }}

    -
    {{ text|e }}
    -
    - {% endmacro %} - - {{ show_dialog('Warning', 'something went wrong i guess') }} - -Inheritance -=========== - -The most powerful part of Jinja is template inheritance. Template inheritance -allows you to build a base "skeleton" template that contains all the common -elements of your site and defines **blocks** that child templates can override. - -Sounds complicated but is very basic. It's easiest to understand it by starting -with an example. - -Base Template -------------- - -This template, which we'll call ``base.html``, defines a simple HTML skeleton -document that you might use for a simple two-column page. It's the job of -"child" templates to fill the empty blocks with content: - -.. sourcecode:: html+jinja - - - - - - {% block title %}{% endblock %} - My Webpage - {% block html_head %}{% endblock %} - - -
    - {% block content %}{% endblock %} -
    - - - - -In this example, the ``{% block %}`` tags define four blocks that child templates -can fill in. All the `block` tag does is to tell the template engine that a -child template may override those portions of the template. - -Child Template --------------- - -A child template might look like this: - -.. sourcecode:: html+jinja - - {% extends "base.html" %} - {% block title %}Index{% endblock %} - - {% block html_head %} - - {% endblock %} - - {% block content %} -

    Index

    -

    - Welcome on my awsome homepage. -

    - {% endblock %} - -The ``{% extends %}`` tag is the key here. It tells the template engine that -this template "extends" another template. When the template system evaluates -this template, first it locates the parent. - -The filename of the template depends on the template loader. For example the -``FileSystemLoader`` allows you to access other templates by giving the -filename. You can access templates in subdirectories with an slash: - -.. sourcecode:: jinja - - {% extends "layout/default.html" %} - -But this behavior can depend on the application using Jinja. - -Note that since the child template didn't define the ``footer`` block, the -value from the parent template is used instead. - -.. admonition:: Note - - You can't define multiple ``{% block %}`` tags with the same name in the - same template. This limitation exists because a block tag works in "both" - directions. That is, a block tag doesn't just provide a hole to fill - it - also defines the content that fills the hole in the *parent*. If there were - two similarly-named ``{% block %}`` tags in a template, that template's - parent wouldn't know which one of the blocks' content to use. - -Template Inclusion -================== - -You can load another template at a given position using ``{% include %}``. -Usually it's a better idea to use inheritance but if you for example want to -load macros, `include` works better than `extends`: - -.. sourcecode:: jinja - - {% include "myhelpers.html" %} - {{ my_helper("foo") }} - -If you define a macro called ``my_helper`` in ``myhelpers.html``, you can now -use it from the template as shown above. - -Filtering Blocks -================ - -Sometimes it could be a good idea to filter a complete block of text. For -example, if you want to escape some html code: - -.. sourcecode:: jinja - - {% filter escape %} - - goes here - - {% endfilter %} - -Of course you can chain filters too: - -.. sourcecode:: jinja - - {% filter lower|escape %} - SOME TEXT - {% endfilter %} - -returns ``"<b>some text</b>"``. - -Defining Variables -================== - -You can also define variables in the namespace using the ``{% set %}`` tag: - -.. sourcecode:: jinja - - {% set foo = 'foobar' %} - {{ foo }} - -This should ouput ``foobar``. - -Scopes -====== - -Jinja has multiple scopes. A scope is something like a new transparent foil on -a stack of foils. You can only write to the outermost foil but read all of them -since you can look through them. If you remove the top foil all data on that -foil disappears. Some tags in Jinja add a new layer to the stack. Currently -these are `block`, `for`, `macro` and `filter`. This means that variables and -other elements defined inside a macro, loop or some of the other tags listed -above will be only available in that block. Here an example: - -.. sourcecode:: jinja - - {% macro angryhello name %} - {% set angryname = name|upper %} - Hello {{ name }}. Hello {{ name }}! - HELLO {{ angryname }}!!!!!!111 - {% endmacro %} - -The variable ``angryname`` just exists inside the macro, not outside it. - -Defined macros appear on the context as variables. Because of this, they are -affected by the scoping too. A macro defined inside of a macro is just available -in those two macros (the macro itself and the macro it's defined in). For `set` -and `macro` two additional rules exist: If a macro is defined in an extended -template but outside of a visible block (thus outside of any block) will be -available in all blocks below. 
This allows you to use `include` statements to -load often used macros at once. - -Undefined Variables -=================== - -If you have already worked with Python, you probably know that -undefined variables raise an exception. This is different in Jinja. There is a -special value called `undefined` that represents values that do not exist. - -This special variable works completely differently from any variable you may -know. If you print it using ``{{ variable }}`` it will not appear because it's -literally empty. If you try to iterate over it, it will work, but no items -are returned. Comparing this value to any other value results in `false`. -Even if you compare it to itself: - -.. sourcecode:: jinja - - {{ undefined == undefined }} - will return false. Not even undefined is undefined :) - Use `is defined` / `is not defined`: - - {{ undefined is not defined }} - will return true. - -There are also some additional rules regarding this special value. Any -mathematical operators (``+``, ``-``, ``*``, ``/``) return the operand -as result: - -.. sourcecode:: jinja - - {{ undefined + "foo" }} - returns "foo" - - {{ undefined - 42 }} - returns 42. Note: not -42! - -In any expression `undefined` evaluates to `false`. It has no length, all -attribute accesses return undefined, and so does calling it: - -.. sourcecode:: jinja - - {{ undefined.attribute().attribute_too[42] }} - still returns `undefined`. - -Escaping -======== - -Sometimes you might want to add Jinja syntax elements into the template -without executing them. In that case you have quite a few possibilities. - -For small parts this might be a good way: - -.. sourcecode:: jinja - - {{ "{{ foo }} is variable syntax and {% foo %} is block syntax" }} - -When you have multiple elements you can use the ``raw`` block: - -.. sourcecode:: jinja - - {% raw %} - Filtering blocks works like this in Jinja: - {% filter escape %} - - goes here - - {% endfilter %} - {% endraw %} - -Reserved Keywords -================= - -Jinja has some keywords that you cannot use as variable names. This limitation -exists to make templates look coherent. Syntax highlighters won't mess things up and -you won't have unexpected output. - -The following keywords exist and cannot be used as identifiers: - - `and`, `block`, `cycle`, `elif`, `else`, `endblock`, `endfilter`, - `endfor`, `endif`, `endmacro`, `endraw`, `endtrans`, `extends`, `filter`, - `for`, `if`, `in`, `include`, `is`, `macro`, `not`, `or`, `pluralize`, - `raw`, `recursive`, `set`, `trans` - -If you want to use such a name you have to prefix or suffix it or use -alternative names: - -.. sourcecode:: jinja - - {% for macro_ in macros %} - {{ macro_('foo') }} - {% endfor %} - -If future Jinja releases add new keywords, those will be "light" keywords which -means that they won't raise an error for several releases but yield warnings -on the application side. But it's very unlikely that new keywords will be -added. - -Internationalization -==================== - -If the application is configured for i18n, you can define translatable blocks -for translators using the `trans` tag or the special underscore function: - -.. sourcecode:: jinja - - {% trans %} - this is a translatable block - {% endtrans %} - - {% trans "This is a translatable string" %} - - {{ _("This is a translatable string") }} - -The latter one is useful if you want translatable arguments for filters etc. - -If you want to have plural forms too, use the `pluralize` block: - -.. sourcecode:: jinja - - {% trans users=users %} - One user found. 
- {% pluralize %} - {{ users }} users found. - {% endtrans %} - - {% trans first=(users|first).username|escape, user=users|length %} - one user {{ first }} found. - {% pluralize users %} - {{ users }} users found, the first one is called {{ first }}. - {% endtrans %} - -If you have multiple arguments, the first one is assumed to be the indicator (the -number that is used to determine the correct singular or plural form. If you -don't have the indicator variable on position 1 you have to tell the `pluralize` -tag the correct variable name. - -Inside translatable blocks you cannot use blocks or expressions (however you can -still use the ``raw`` block which will work as expected). The variable -print syntax (``{{ variablename }}``) is the only way to insert the variables -defined in the ``trans`` header. Filters must be applied in the header. - -.. admonition:: note - - Please make sure that you always use pluralize blocks where required. - Many languages have more complex plural forms than the English language. - - Never try to workaround that issue by using something like this: - - .. sourcecode:: jinja - - {% if count != 1 %} - {{ count }} users found. - {% else %} - one user found. - {% endif %} - -.. _slicing chapter: http://diveintopython.org/native_data_types/lists.html#odbchelper.list.slice -.. _range function: http://docs.python.org/tut/node6.html#SECTION006300000000000000000 diff --git a/tests/examplefiles/json.lasso b/tests/examplefiles/json.lasso deleted file mode 100644 index 72926112..00000000 --- a/tests/examplefiles/json.lasso +++ /dev/null @@ -1,301 +0,0 @@ - - // - // - // - -If: (Lasso_TagExists: 'Encode_JSON') == False; - - Define_Tag: 'JSON', -Namespace='Encode_', -Required='value', -Optional='options'; - - Local: 'escapes' = Map('\\' = '\\', '"' = '"', '\r' = 'r', '\n' = 'n', '\t' = 't', '\f' = 'f', '\b' = 'b'); - Local: 'output' = ''; - Local: 'newoptions' = (Array: -Internal); - If: !(Local_Defined: 'options') || (#options->(IsA: 'array') == False); - Local: 'options' = (Array); - /If; - If: (#options >> -UseNative) || (Params >> -UseNative); - #newoptions->(Insert: -UseNative); - /If; - If: (#options >> -NoNative) || (Params >> -NoNative); - #newoptions->(Insert: -NoNative); - /If; - If: (#options !>> -UseNative) && ((#value->(IsA: 'set')) || (#value->(IsA: 'list')) || (#value->(IsA: 'queue')) || (#value->(IsA: 'priorityqueue')) || (#value->(IsA: 'stack'))); - #output += (Encode_JSON: Array->(insertfrom: #value->iterator) &, -Options=#newoptions); - Else: (#options !>> -UseNative) && (#value->(IsA: 'pair')); - #output += (Encode_JSON: (Array: #value->First, #value->Second)); - Else: (#options !>> -Internal) && (#value->(Isa: 'array') == False) && (#value->(IsA: 'map') == False); - #output += '[' + (Encode_JSON: #value, -Options=#newoptions) + ']'; - Else: (#value->(IsA: 'literal')); - #output += #value; - Else: (#value->(IsA: 'string')); - #output += '"'; - Loop: (#value->Length); - Local('character' = #value->(Get: Loop_Count)); - #output->(Append: - (Match_RegExp('[\\x{0020}-\\x{21}\\x{23}-\\x{5b}\\x{5d}-\\x{10fff}]') == #character) ? #character | - '\\' + (#escapes->(Contains: #character) ? 
#escapes->(Find: #character) | 'u' + String(Encode_Hex(#character))->PadLeading(4, '0')&) - ); - /Loop; - #output += '"'; - Else: (#value->(IsA: 'integer')) || (#value->(IsA: 'decimal')) || (#value->(IsA: 'boolean')); - #output += (String: #value); - Else: (#value->(IsA: 'null')); - #output += 'null'; - Else: (#value->(IsA: 'date')); - If: #value->gmt; - #output += '"' + #value->(format: '%QT%TZ') + '"'; - Else; - #output += '"' + #value->(format: '%QT%T') + '"'; - /If; - Else: (#value->(IsA: 'array')); - #output += '['; - Iterate: #value, (Local: 'temp'); - #output += (Encode_JSON: #temp, -Options=#newoptions); - If: #value->Size != Loop_Count; - #output += ', '; - /If; - /Iterate; - #output += ']'; - Else: (#value->(IsA: 'object')); - #output += '{'; - Iterate: #value, (Local: 'temp'); - #output += #temp->First + ': ' + (Encode_JSON: #temp->Second, -Options=#newoptions); - If: (#value->Size != Loop_Count); - #output += ', '; - /If; - /Iterate; - #output += '}'; - Else: (#value->(IsA: 'map')); - #output += '{'; - Iterate: #value, (Local: 'temp'); - #output += (Encode_JSON: #temp->First, -Options=#newoptions) + ': ' + (Encode_JSON: #temp->Second, -Options=#newoptions); - If: (#value->Size != Loop_Count); - #output += ', '; - /If; - /Iterate; - #output += '}'; - Else: (#value->(IsA: 'client_ip')) || (#value->(IsA: 'client_address')); - #output += (Encode_JSON: (String: #value), -Options=#newoptions); - Else: (#options !>> -UseNative) && (#value->(IsA: 'set')) || (#value->(IsA: 'list')) || (#value->(IsA: 'queue')) || (#value->(IsA: 'priorityqueue')) || (#value->(IsA: 'stack')); - #output += (Encode_JSON: Array->(insertfrom: #value->iterator) &, -Options=#newoptions); - Else: (#options !>> -NoNative); - #output += (Encode_JSON: (Map: '__jsonclass__'=(Array:'deserialize',(Array:'' + #value->Serialize + '')))); - /If; - Return: @#output; - - /Define_Tag; - -/If; - -If: (Lasso_TagExists: 'Decode_JSON') == False; - - Define_Tag: 'JSON', -Namespace='Decode_', -Required='value'; - - (#value == '') ? 
Return: Null; - - Define_Tag: 'consume_string', -Required='ibytes'; - Local: 'unescapes' = (map: 34 = '"', 92 = '\\', 98 = '\b', 102 = '\f', 110 = '\n', 114 = '\r', 116 = '\t'); - Local: 'temp' = 0, 'obytes' = Bytes; - While: ((#temp := #ibytes->export8bits) != 34); // '"' - If: (#temp === 92); // '\' - #temp = #ibytes->export8bits; - If: (#temp === 117); // 'u' - #obytes->(ImportString: (Decode_Hex: (String: #ibytes->(GetRange: #ibytes->Position + 1, 4)))->(ExportString: 'UTF-16'), 'UTF-8'); - #ibytes->(SetPosition: #ibytes->Position + 4); - Else; - If: (#unescapes->(Contains: #temp)); - #obytes->(ImportString: #unescapes->(Find: #temp), 'UTF-8'); - Else; - #obytes->(Import8Bits: #temp); - /If; - /If; - Else; - #obytes->(Import8Bits: #temp); - /If; - /While; - Local('output' = #obytes->(ExportString: 'UTF-8')); - If: #output->(BeginsWith: '') && #output->(EndsWith: ''); - Local: 'temp' = #output - '' - ''; - Local: 'output' = null; - Protect; - #output->(Deserialize: #temp); - /Protect; - Else: (Valid_Date: #output, -Format='%QT%TZ'); - Local: 'output' = (Date: #output, -Format='%QT%TZ'); - Else: (Valid_Date: #output, -Format='%QT%T'); - Local: 'output' = (Date: #output, -Format='%QT%T'); - /If; - Return: @#output; - /Define_Tag; - - Define_Tag: 'consume_token', -Required='ibytes', -required='temp'; - Local: 'obytes' = bytes->(import8bits: #temp) &; - local: 'delimit' = (array: 9, 10, 13, 32, 44, 58, 93, 125); // \t\r\n ,:]} - While: (#delimit !>> (#temp := #ibytes->export8bits)); - #obytes->(import8bits: #temp); - /While; - Local: 'output' = (String: #obytes); - If: (#output == 'true') || (#output == 'false'); - Return: (Boolean: #output); - Else: (#output == 'null'); - Return: Null; - Else: (String_IsNumeric: #output); - Return: (#output >> '.') ? (Decimal: #output) | (Integer: #output); - /If; - Return: @#output; - /Define_Tag; - - Define_Tag: 'consume_array', -Required='ibytes'; - Local: 'output' = array; - local: 'delimit' = (array: 9, 10, 13, 32, 44); // \t\r\n , - local: 'temp' = 0; - While: ((#temp := #ibytes->export8bits) != 93); // ] - If: (#delimit >> #temp); - // Discard whitespace - Else: (#temp == 34); // " - #output->(insert: (consume_string: @#ibytes)); - Else: (#temp == 91); // [ - #output->(insert: (consume_array: @#ibytes)); - Else: (#temp == 123); // { - #output->(insert: (consume_object: @#ibytes)); - Else; - #output->(insert: (consume_token: @#ibytes, @#temp)); - (#temp == 93) ? Loop_Abort; - /If; - /While; - Return: @#output; - /Define_Tag; - - Define_Tag: 'consume_object', -Required='ibytes'; - Local: 'output' = map; - local: 'delimit' = (array: 9, 10, 13, 32, 44); // \t\r\n , - local: 'temp' = 0; - local: 'key' = null; - local: 'val' = null; - While: ((#temp := #ibytes->export8bits) != 125); // } - If: (#delimit >> #temp); - // Discard whitespace - Else: (#key !== null) && (#temp == 34); // " - #output->(insert: #key = (consume_string: @#ibytes)); - #key = null; - Else: (#key !== null) && (#temp == 91); // [ - #output->(insert: #key = (consume_array: @#ibytes)); - #key = null; - Else: (#key !== null) && (#temp == 123); // { - #output->(insert: #key = (consume_object: @#ibytes)); - #key = null; - Else: (#key !== null); - #output->(insert: #key = (consume_token: @#ibytes, @#temp)); - (#temp == 125) ? Loop_abort; - #key = null; - Else; - #key = (consume_string: @#ibytes); - while(#delimit >> (#temp := #ibytes->export8bits)); - /while; - #temp != 58 ? 
Loop_Abort; - /If; - /While; - If: (#output >> '__jsonclass__') && (#output->(Find: '__jsonclass__')->(isa: 'array')) && (#output->(Find: '__jsonclass__')->size >= 2) && (#output->(Find: '__jsonclass__')->First == 'deserialize'); - Return: #output->(find: '__jsonclass__')->Second->First; - Else: (#output >> 'native') && (#output >> 'comment') && (#output->(find: 'comment') == 'http://www.lassosoft.com/json'); - Return: #output->(find: 'native'); - /If; - Return: @#output; - /Define_Tag; - - Local: 'ibytes' = (bytes: #value); - Local: 'start' = 1; - #ibytes->removeLeading(BOM_UTF8); - Local: 'temp' = #ibytes->export8bits; - If: (#temp == 91); // [ - Local: 'output' = (consume_array: @#ibytes); - Return: @#output; - Else: (#temp == 123); // { - Local: 'output' = (consume_object: @#ibytes); - Return: @#output; - /If; - - /Define_Tag; - -/If; - -If: (Lasso_TagExists: 'Literal') == False; - - Define_Type: 'Literal', 'String'; - /Define_Type; - -/If; - -If: (Lasso_TagExists: 'Object') == False; - - Define_Type: 'Object', 'Map'; - /Define_Type; - -/If; - -If: (Lasso_TagExists: 'JSON_RPCCall') == False; - - Define_Tag: 'RPCCall', -Namespace='JSON_', - -Required='method', - -Optional='params', - -Optional='id', - -Optional='host'; - - !(Local_Defined: 'host') ? Local: 'host' = 'http://localhost/lassoapps.8/rpc/rpc.lasso'; - !(Local_Defined: 'id') ? Local: 'id' = Lasso_UniqueID; - Local: 'request' = (Map: 'method' = #method, 'params' = #params, 'id' = #id); - Local: 'request' = (Encode_JSON: #request); - Local: 'result' = (Include_URL: #host, -PostParams=#request); - Local: 'result' = (Decode_JSON: #result); - Return: @#result; - - /Define_Tag; - -/If; - -If: (Lasso_TagExists: 'JSON_Records') == False; - - Define_Tag: 'JSON_Records', - -Optional='KeyField', - -Optional='ReturnField', - -Optional='ExcludeField', - -Optional='Fields'; - - Local: '_fields' = (Local_Defined: 'fields') && #fields->(IsA: 'array') ? #fields | Field_Names; - Fail_If: #_fields->size == 0, -1, 'No fields found for [JSON_Records]'; - Local: '_keyfield' = (Local: 'keyfield'); - If: #_fields !>> #_keyfield; - Local: '_keyfield' = (KeyField_Name); - If: #_fields !>> #_keyfield; - Local: '_keyfield' = 'ID'; - If: #_fields !>> #_keyfield; - Local: '_keyfield' = #_fields->First; - /If; - /If; - /If; - Local: '_index' = #_fields->(FindPosition: #_keyfield)->First; - Local: '_return' = (Local_Defined: 'returnfield') ? (Params->(Find: -ReturnField)->(ForEach: {Params->First = Params->First->Second; Return: True}) &) | @#_fields; - Local: '_exclude' = (Local_Defined: 'excludefield') ? (Params->(Find: -ExcludeField)->(ForEach: {Params->First = Params->First->Second; Return: True}) &) | Array; - Local: '_records' = Array; - Iterate: Records_Array, (Local: '_record'); - Local: '_temp' = Map; - Iterate: #_fields, (Local: '_field'); - ((#_return >> #_field) && (#_exclude !>> #_field)) ? 
#_temp->Insert(#_field = #_record->(Get: Loop_Count)); - /Iterate; - #_records->Insert(#_temp); - /Iterate; - Local: '_output' = (Encode_JSON: (Object: 'error_msg'=Error_Msg, 'error_code'=Error_Code, 'found_count'=Found_Count, 'keyfield'=#_keyfield, 'rows'=#_records)); - Return: @#_output; - - /Define_Tag; - -/If; - -?> diff --git a/tests/examplefiles/json.lasso9 b/tests/examplefiles/json.lasso9 deleted file mode 100644 index 732ab2af..00000000 --- a/tests/examplefiles/json.lasso9 +++ /dev/null @@ -1,213 +0,0 @@ - -/** - trait_json_serialize - Objects with this trait will be assumed to convert to json data - when its ->asString method is called -*/ -define trait_json_serialize => trait { - require asString() -} - -define json_serialize(e::bytes)::string => ('"' + (string(#e)->Replace(`\`, `\\`) & Replace('\"', '\\"') & Replace('\r', '\\r') & Replace('\n', '\\n') & Replace('\t', '\\t') & Replace('\f', '\\f') & Replace('\b', '\\b') &) + '"') -define json_serialize(e::string)::string => ('"' + (string(#e)->Replace(`\`, `\\`) & Replace('\"', '\\"') & Replace('\r', '\\r') & Replace('\n', '\\n') & Replace('\t', '\\t') & Replace('\f', '\\f') & Replace('\b', '\\b') &) + '"') -define json_serialize(e::json_literal)::string => (#e->asstring) -define json_serialize(e::integer)::string => (#e->asstring) -define json_serialize(e::decimal)::string => (#e->asstring) -define json_serialize(e::boolean)::string => (#e->asstring) -define json_serialize(e::null)::string => ('null') -define json_serialize(e::date)::string => ('"' + #e->format(#e->gmt ? '%QT%TZ' | '%Q%T') + '"') -/* -define json_serialize(e::array)::string => { - local(output) = ''; - local(delimit) = ''; - #e->foreach => { #output += #delimit + json_serialize(#1); #delimit = ', '; } - return('[' + #output + ']'); -} -define json_serialize(e::staticarray)::string => { - local(output) = ''; - local(delimit) = ''; - #e->foreach => { #output += #delimit + json_serialize(#1); #delimit = ', '; } - return('[' + #output + ']'); -} -*/ -define json_serialize(e::trait_forEach)::string => { - local(output) = ''; - local(delimit) = ''; - #e->foreach => { #output += #delimit + json_serialize(#1); #delimit = ', '; } - return('[' + #output + ']'); -} -define json_serialize(e::map)::string => { - local(output = with pr in #e->eachPair - select json_serialize(#pr->first->asString) + ': ' + json_serialize(#pr->second)) - return '{' + #output->join(',') + '}' -} -define json_serialize(e::json_object)::string => { - local(output) = ''; - local(delimit) = ''; - #e->foreachpair => { #output += #delimit + #1->first + ': ' + json_serialize(#1->second); #delimit = ', '; } - return('{' + #output + '}'); -} -define json_serialize(e::trait_json_serialize) => #e->asString -define json_serialize(e::any)::string => json_serialize('' + #e->serialize + '') - -// Bil Corry fixes for decoding json -define json_consume_string(ibytes::bytes) => { - local(obytes) = bytes; - local(temp) = 0; - while((#temp := #ibytes->export8bits) != 34); - #obytes->import8bits(#temp); - (#temp == 92) ? 
#obytes->import8bits(#ibytes->export8bits); // Escape \ - /while; - local(output = string(#obytes)->unescape) - //Replace('\\"', '\"') & Replace('\\r', '\r') & Replace('\\n', '\n') & Replace('\\t', '\t') & Replace('\\f', '\f') & Replace('\\b', '\b') &; - if(#output->BeginsWith('') && #output->EndsWith('')); - Protect; - return serialization_reader(xml(#output - '' - ''))->read - /Protect; - else( (#output->size == 16 or #output->size == 15) and regexp(`\d{8}T\d{6}Z?`, '', #output)->matches) - return date(#output, -Format=#output->size == 16?`yyyyMMdd'T'HHmmssZ`|`yyyyMMdd'T'HHmmss`) - /if - return #output -} - -// Bil Corry fix + Ke fix -define json_consume_token(ibytes::bytes, temp::integer) => { - - local(obytes = bytes->import8bits(#temp) &, - delimit = array(9, 10, 13, 32, 44, 58, 93, 125)) // \t\r\n ,:]} - - while(#delimit !>> (#temp := #ibytes->export8bits)) - #obytes->import8bits(#temp) - /while - - #temp == 125? // } - #ibytes->marker -= 1 -//============================================================================ -// Is also end of token if end of array[] - #temp == 93? // ] - #ibytes->marker -= 1 -//............................................................................ - - local(output = string(#obytes)) - #output == 'true'? - return true - #output == 'false'? - return false - #output == 'null'? - return null - string_IsNumeric(#output)? - return (#output >> '.')? decimal(#output) | integer(#output) - - return #output -} - -// Bil Corry fix -define json_consume_array(ibytes::bytes)::array => { - Local(output) = array; - local(delimit) = array( 9, 10, 13, 32, 44); // \t\r\n , - local(temp) = 0; - While((#temp := #ibytes->export8bits) != 93); // ] - If(#delimit >> #temp); - // Discard whitespace - Else(#temp == 34); // " - #output->insert(json_consume_string(#ibytes)); - Else(#temp == 91); // [ - #output->insert(json_consume_array(#ibytes)); - Else(#temp == 123); // { - #output->insert(json_consume_object(#ibytes)); - Else; - #output->insert(json_consume_token(#ibytes, #temp)); - (#temp == 93) ? Loop_Abort; - /If; - /While; - Return(#output); -} - -// Bil Corry fix -define json_consume_object(ibytes::bytes)::map => { - Local('output' = map, - 'delimit' = array( 9, 10, 13, 32, 44), // \t\r\n , - 'temp' = 0, - 'key' = null, - 'val' = null); - While((#temp := #ibytes->export8bits) != 125); // } - If(#delimit >> #temp); - // Discard whitespace - Else((#key !== null) && (#temp == 34)); // " - #output->insert(#key = json_consume_string(#ibytes)); - #key = null; - Else((#key !== null) && (#temp == 91)); // [ - #output->insert(#key = json_consume_array(#ibytes)); - #key = null; - Else((#key !== null) && (#temp == 123)); // { - #output->insert(#key = json_consume_object(#ibytes)); - #key = null; - Else((#key !== null)); - #output->insert(#key = json_consume_token(#ibytes, #temp)); - #key = null; - Else; - #key = json_consume_string(#ibytes); - while(#delimit >> (#temp := #ibytes->export8bits)); - /while; - #temp != 58 ? 
Loop_Abort; - /If; - /While; - - If((#output >> '__jsonclass__') && (#output->Find('__jsonclass__')->isa('array')) && (#output->Find('__jsonclass__')->size >= 2) && (#output->Find('__jsonclass__')->First == 'deserialize')); - Return(#output->find('__jsonclass__')->Second->First); - Else((#output >> 'native') && (#output >> 'comment') && (#output->find('comment') == 'http://www.lassosoft.com/json')); - Return(#output->find('native')); - /If; - Return(#output); -} - -// Bil Corry fix + Ke fix -define json_deserialize(ibytes::bytes)::any => { - #ibytes->removeLeading(bom_utf8); - -//============================================================================ -// Reset marker on provided bytes - #ibytes->marker = 0 -//............................................................................ - - Local(temp) = #ibytes->export8bits; - If(#temp == 91); // [ - Return(json_consume_array(#ibytes)); - Else(#temp == 123); // { - Return(json_consume_object(#ibytes)); - else(#temp == 34) // " - return json_consume_string(#ibytes) - /If; -} - -define json_deserialize(s::string) => json_deserialize(bytes(#s)) - -/**! json_literal - This is a subclass of String used for JSON encoding. - - A json_literal works exactly like a string, but will be inserted directly - rather than being encoded into JSON. This allows JavaScript elements - like functions to be inserted into JSON objects. This is most useful - when the JSON object will be used within a JavaScript on the local page. - [Map: 'fn'=Literal('function(){ ...})] => {'fn': function(){ ...}} -**/ -define json_literal => type { - parent string -} - -/**! json_object - This is a subclass of Map used for JSON encoding. - - An object works exactly like a map, but when it is encoded into JSON all - of the keys will be inserted literally. This makes it easy to create a - JavaScript object without extraneous quote marks. - Object('name'='value') => {name: "value"} -**/ -define json_object => type { - parent map - public onCreate(...) => ..onCreate(:#rest or (:)) -} - -define json_rpccall(method::string, params=map, id='', host='') => { - #id == '' ? #host = Lasso_UniqueID; - #host == '' ? #host = 'http://localhost/lassoapps.8/rpc/rpc.lasso'; - Return(Decode_JSON(Include_URL(#host, -PostParams=Encode_JSON(Map('method' = #method, 'params' = #params, 'id' = #id))))); -} diff --git a/tests/examplefiles/language.hy b/tests/examplefiles/language.hy deleted file mode 100644 index 9768c39c..00000000 --- a/tests/examplefiles/language.hy +++ /dev/null @@ -1,165 +0,0 @@ -;;;; This contains some of the core Hy functions used -;;;; to make functional programming slightly easier. -;;;; - - -(defn _numeric-check [x] - (if (not (numeric? x)) - (raise (TypeError (.format "{0!r} is not a number" x))))) - -(defn cycle [coll] - "Yield an infinite repetition of the items in coll" - (setv seen []) - (for [x coll] - (yield x) - (.append seen x)) - (while seen - (for [x seen] - (yield x)))) - -(defn dec [n] - "Decrement n by 1" - (_numeric-check n) - (- n 1)) - -(defn distinct [coll] - "Return a generator from the original collection with duplicates - removed" - (let [[seen []] [citer (iter coll)]] - (for [val citer] - (if (not_in val seen) - (do - (yield val) - (.append seen val)))))) - -(defn drop [count coll] - "Drop `count` elements from `coll` and yield back the rest" - (let [[citer (iter coll)]] - (try (for [i (range count)] - (next citer)) - (catch [StopIteration])) - citer)) - -(defn even? 
[n] - "Return true if n is an even number" - (_numeric-check n) - (= (% n 2) 0)) - -(defn filter [pred coll] - "Return all elements from `coll` that pass `pred`" - (let [[citer (iter coll)]] - (for [val citer] - (if (pred val) - (yield val))))) - -(defn inc [n] - "Increment n by 1" - (_numeric-check n) - (+ n 1)) - -(defn instance? [klass x] - (isinstance x klass)) - -(defn iterable? [x] - "Return true if x is iterable" - (try (do (iter x) true) - (catch [Exception] false))) - -(defn iterate [f x] - (setv val x) - (while true - (yield val) - (setv val (f val)))) - -(defn iterator? [x] - "Return true if x is an iterator" - (try (= x (iter x)) - (catch [TypeError] false))) - -(defn neg? [n] - "Return true if n is < 0" - (_numeric-check n) - (< n 0)) - -(defn none? [x] - "Return true if x is None" - (is x None)) - -(defn numeric? [x] - (import numbers) - (instance? numbers.Number x)) - -(defn nth [coll index] - "Return nth item in collection or sequence, counting from 0" - (if (not (neg? index)) - (if (iterable? coll) - (try (first (list (take 1 (drop index coll)))) - (catch [IndexError] None)) - (try (get coll index) - (catch [IndexError] None))) - None)) - -(defn odd? [n] - "Return true if n is an odd number" - (_numeric-check n) - (= (% n 2) 1)) - -(defn pos? [n] - "Return true if n is > 0" - (_numeric_check n) - (> n 0)) - -(defn remove [pred coll] - "Return coll with elements removed that pass `pred`" - (let [[citer (iter coll)]] - (for [val citer] - (if (not (pred val)) - (yield val))))) - -(defn repeat [x &optional n] - "Yield x forever or optionally n times" - (if (none? n) - (setv dispatch (fn [] (while true (yield x)))) - (setv dispatch (fn [] (for [_ (range n)] (yield x))))) - (dispatch)) - -(defn repeatedly [func] - "Yield result of running func repeatedly" - (while true - (yield (func)))) - -(defn take [count coll] - "Take `count` elements from `coll`, or the whole set if the total - number of entries in `coll` is less than `count`." - (let [[citer (iter coll)]] - (for [_ (range count)] - (yield (next citer))))) - -(defn take-nth [n coll] - "Return every nth member of coll - raises ValueError for (not (pos? n))" - (if (pos? n) - (let [[citer (iter coll)] [skip (dec n)]] - (for [val citer] - (yield val) - (for [_ (range skip)] - (next citer)))) - (raise (ValueError "n must be positive")))) - -(defn take-while [pred coll] - "Take all elements while `pred` is true" - (let [[citer (iter coll)]] - (for [val citer] - (if (pred val) - (yield val) - (break))))) - -(defn zero? [n] - "Return true if n is 0" - (_numeric_check n) - (= n 0)) - -(def *exports* ["cycle" "dec" "distinct" "drop" "even?" "filter" "inc" - "instance?" "iterable?" "iterate" "iterator?" "neg?" - "none?" "nth" "numeric?" "odd?" "pos?" 
"remove" "repeat" - "repeatedly" "take" "take_nth" "take_while" "zero?"]) diff --git a/tests/examplefiles/lighttpd_config.conf b/tests/examplefiles/lighttpd_config.conf deleted file mode 100644 index 8475f378..00000000 --- a/tests/examplefiles/lighttpd_config.conf +++ /dev/null @@ -1,13 +0,0 @@ -fastcgi.server = ( ".php" => (( - "bin-path" => "/path/to/php-cgi", - "socket" => "/tmp/php.socket", - "max-procs" => 2, - "bin-environment" => ( - "PHP_FCGI_CHILDREN" => "16", - "PHP_FCGI_MAX_REQUESTS" => "10000" - ), - "bin-copy-environment" => ( - "PATH", "SHELL", "USER" - ), - "broken-scriptfilename" => "enable" - ))) diff --git a/tests/examplefiles/limbo.b b/tests/examplefiles/limbo.b deleted file mode 100644 index e55a0a62..00000000 --- a/tests/examplefiles/limbo.b +++ /dev/null @@ -1,456 +0,0 @@ -implement Ninewin; -include "sys.m"; - sys: Sys; -include "draw.m"; - draw: Draw; - Image, Display, Pointer: import draw; -include "arg.m"; -include "keyboard.m"; -include "tk.m"; -include "wmclient.m"; - wmclient: Wmclient; - Window: import wmclient; -include "sh.m"; - sh: Sh; - -# run a p9 graphics program (default rio) under inferno wm, -# making available to it: -# /dev/winname - naming the current inferno window (changing on resize) -# /dev/mouse - pointer file + resize events; write to change position -# /dev/cursor - change appearance of cursor. -# /dev/draw - inferno draw device -# /dev/cons - read keyboard events, write to 9win stdout. - -Ninewin: module { - init: fn(ctxt: ref Draw->Context, argv: list of string); -}; -winname: string; - -init(ctxt: ref Draw->Context, argv: list of string) -{ - size := Draw->Point(500, 500); - sys = load Sys Sys->PATH; - draw = load Draw Draw->PATH; - wmclient = load Wmclient Wmclient->PATH; - wmclient->init(); - sh = load Sh Sh->PATH; - - buts := Wmclient->Resize; - if(ctxt == nil){ - ctxt = wmclient->makedrawcontext(); - buts = Wmclient->Plain; - } - arg := load Arg Arg->PATH; - arg->init(argv); - arg->setusage("9win [-s] [-x width] [-y height]"); - exportonly := 0; - while(((opt := arg->opt())) != 0){ - case opt { - 's' => - exportonly = 1; - 'x' => - size.x = int arg->earg(); - 'y' => - size.y = int arg->earg(); - * => - arg->usage(); - } - } - if(size.x < 1 || size.y < 1) - arg->usage(); - argv = arg->argv(); - if(argv != nil && hd argv == "-s"){ - exportonly = 1; - argv = tl argv; - } - if(argv == nil && !exportonly) - argv = "rio" :: nil; - if(argv != nil && exportonly){ - sys->fprint(sys->fildes(2), "9win: no command allowed with -s flag\n"); - raise "fail:usage"; - } - title := "9win"; - if(!exportonly) - title += " " + hd argv; - w := wmclient->window(ctxt, title, buts); - w.reshape(((0, 0), size)); - w.onscreen(nil); - if(w.image == nil){ - sys->fprint(sys->fildes(2), "9win: cannot get image to draw on\n"); - raise "fail:no window"; - } - - sys->pctl(Sys->FORKNS|Sys->NEWPGRP, nil); - ld := "/n/9win"; - if(sys->bind("#s", ld, Sys->MREPL) == -1 && - sys->bind("#s", ld = "/n/local", Sys->MREPL) == -1){ - sys->fprint(sys->fildes(2), "9win: cannot bind files: %r\n"); - raise "fail:error"; - } - w.startinput("kbd" :: "ptr" :: nil); - spawn ptrproc(rq := chan of Sys->Rread, ptr := chan[10] of ref Pointer, reshape := chan[1] of int); - - - fwinname := sys->file2chan(ld, "winname"); - fconsctl := sys->file2chan(ld, "consctl"); - fcons := sys->file2chan(ld, "cons"); - fmouse := sys->file2chan(ld, "mouse"); - fcursor := sys->file2chan(ld, "cursor"); - if(!exportonly){ - spawn run(sync := chan of string, w.ctl, ld, argv); - if((e := <-sync) != nil){ - 
sys->fprint(sys->fildes(2), "9win: %s", e); - raise "fail:error"; - } - } - spawn serveproc(w, rq, fwinname, fconsctl, fcons, fmouse, fcursor); - if(!exportonly){ - # handle events synchronously so that we don't get a "killed" message - # from the shell. - handleevents(w, ptr, reshape); - }else{ - spawn handleevents(w, ptr, reshape); - sys->bind(ld, "/dev", Sys->MBEFORE); - export(sys->fildes(0), w.ctl); - } -} - -handleevents(w: ref Window, ptr: chan of ref Pointer, reshape: chan of int) -{ - for(;;)alt{ - c := <-w.ctxt.ctl or - c = <-w.ctl => - e := w.wmctl(c); - if(e != nil) - sys->fprint(sys->fildes(2), "9win: ctl error: %s\n", e); - if(e == nil && c != nil && c[0] == '!'){ - alt{ - reshape <-= 1 => - ; - * => - ; - } - winname = nil; - } - p := <-w.ctxt.ptr => - if(w.pointer(*p) == 0){ - # XXX would block here if client isn't reading mouse... but we do want to - # extert back-pressure, which conflicts. - alt{ - ptr <-= p => - ; - * => - ; # sys->fprint(sys->fildes(2), "9win: discarding mouse event\n"); - } - } - } -} - -serveproc(w: ref Window, mouserq: chan of Sys->Rread, fwinname, fconsctl, fcons, fmouse, fcursor: ref Sys->FileIO) -{ - winid := 0; - krc: list of Sys->Rread; - ks: string; - - for(;;)alt { - c := <-w.ctxt.kbd => - ks[len ks] = inf2p9key(c); - if(krc != nil){ - hd krc <-= (array of byte ks, nil); - ks = nil; - krc = tl krc; - } - (nil, d, nil, wc) := <-fcons.write => - if(wc != nil){ - sys->write(sys->fildes(1), d, len d); - wc <-= (len d, nil); - } - (nil, nil, nil, rc) := <-fcons.read => - if(rc != nil){ - if(ks != nil){ - rc <-= (array of byte ks, nil); - ks = nil; - }else - krc = rc :: krc; - } - (offset, nil, nil, rc) := <-fwinname.read => - if(rc != nil){ - if(winname == nil){ - winname = sys->sprint("noborder.9win.%d", winid++); - if(w.image.name(winname, 1) == -1){ - sys->fprint(sys->fildes(2), "9win: namewin %q failed: %r", winname); - rc <-= (nil, "namewin failure"); - break; - } - } - d := array of byte winname; - if(offset < len d) - d = d[offset:]; - else - d = nil; - rc <-= (d, nil); - } - (nil, nil, nil, wc) := <-fwinname.write => - if(wc != nil) - wc <-= (-1, "permission denied"); - (nil, nil, nil, rc) := <-fconsctl.read => - if(rc != nil) - rc <-= (nil, "permission denied"); - (nil, d, nil, wc) := <-fconsctl.write => - if(wc != nil){ - if(string d != "rawon") - wc <-= (-1, "cannot change console mode"); - else - wc <-= (len d, nil); - } - (nil, nil, nil, rc) := <-fmouse.read => - if(rc != nil) - mouserq <-= rc; - (nil, d, nil, wc) := <-fmouse.write => - if(wc != nil){ - e := cursorset(w, string d); - if(e == nil) - wc <-= (len d, nil); - else - wc <-= (-1, e); - } - (nil, nil, nil, rc) := <-fcursor.read => - if(rc != nil) - rc <-= (nil, "permission denied"); - (nil, d, nil, wc) := <-fcursor.write => - if(wc != nil){ - e := cursorswitch(w, d); - if(e == nil) - wc <-= (len d, nil); - else - wc <-= (-1, e); - } - } -} - -ptrproc(rq: chan of Sys->Rread, ptr: chan of ref Pointer, reshape: chan of int) -{ - rl: list of Sys->Rread; - c := ref Pointer(0, (0, 0), 0); - for(;;){ - ch: int; - alt{ - p := <-ptr => - ch = 'm'; - c = p; - <-reshape => - ch = 'r'; - rc := <-rq => - rl = rc :: rl; - continue; - } - if(rl == nil) - rl = <-rq :: rl; - hd rl <-= (sys->aprint("%c%11d %11d %11d %11d ", ch, c.xy.x, c.xy.y, c.buttons, c.msec), nil); - rl = tl rl; - } -} - -cursorset(w: ref Window, m: string): string -{ - if(m == nil || m[0] != 'm') - return "invalid mouse message"; - x := int m[1:]; - for(i := 1; i < len m; i++) - if(m[i] == ' '){ - while(m[i] == ' ') - i++; 
- break; - } - if(i == len m) - return "invalid mouse message"; - y := int m[i:]; - return w.wmctl(sys->sprint("ptr %d %d", x, y)); -} - -cursorswitch(w: ref Window, d: array of byte): string -{ - Hex: con "0123456789abcdef"; - if(len d != 2*4+64) - return w.wmctl("cursor"); - hot := Draw->Point(bglong(d, 0*4), bglong(d, 1*4)); - s := sys->sprint("cursor %d %d 16 32 ", hot.x, hot.y); - for(i := 2*4; i < len d; i++){ - c := int d[i]; - s[len s] = Hex[c >> 4]; - s[len s] = Hex[c & 16rf]; - } - return w.wmctl(s); -} - -run(sync, ctl: chan of string, ld: string, argv: list of string) -{ - Rcmeta: con "|<>&^*[]?();"; - sys->pctl(Sys->FORKNS, nil); - if(sys->bind("#₪", "/srv", Sys->MCREATE) == -1){ - sync <-= sys->sprint("cannot bind srv device: %r"); - exit; - } - srvname := "/srv/9win."+string sys->pctl(0, nil); # XXX do better. - fd := sys->create(srvname, Sys->ORDWR, 8r600); - if(fd == nil){ - sync <-= sys->sprint("cannot create %s: %r", srvname); - exit; - } - sync <-= nil; - spawn export(fd, ctl); - sh->run(nil, "os" :: - "rc" :: "-c" :: - "mount "+srvname+" /mnt/term;"+ - "rm "+srvname+";"+ - "bind -b /mnt/term"+ld+" /dev;"+ - "bind /mnt/term/dev/draw /dev/draw ||"+ - "bind -a /mnt/term/dev /dev;"+ - quotedc("cd"::"/mnt/term"+cwd()::nil, Rcmeta)+";"+ - quotedc(argv, Rcmeta)+";":: - nil - ); -} - -export(fd: ref Sys->FD, ctl: chan of string) -{ - sys->export(fd, "/", Sys->EXPWAIT); - ctl <-= "exit"; -} - -inf2p9key(c: int): int -{ - KF: import Keyboard; - - P9KF: con 16rF000; - Spec: con 16rF800; - Khome: con P9KF|16r0D; - Kup: con P9KF|16r0E; - Kpgup: con P9KF|16r0F; - Kprint: con P9KF|16r10; - Kleft: con P9KF|16r11; - Kright: con P9KF|16r12; - Kdown: con Spec|16r00; - Kview: con Spec|16r00; - Kpgdown: con P9KF|16r13; - Kins: con P9KF|16r14; - Kend: con P9KF|16r18; - Kalt: con P9KF|16r15; - Kshift: con P9KF|16r16; - Kctl: con P9KF|16r17; - - case c { - Keyboard->LShift => - return Kshift; - Keyboard->LCtrl => - return Kctl; - Keyboard->LAlt => - return Kalt; - Keyboard->Home => - return Khome; - Keyboard->End => - return Kend; - Keyboard->Up => - return Kup; - Keyboard->Down => - return Kdown; - Keyboard->Left => - return Kleft; - Keyboard->Right => - return Kright; - Keyboard->Pgup => - return Kpgup; - Keyboard->Pgdown => - return Kpgdown; - Keyboard->Ins => - return Kins; - - # function keys - KF|1 or - KF|2 or - KF|3 or - KF|4 or - KF|5 or - KF|6 or - KF|7 or - KF|8 or - KF|9 or - KF|10 or - KF|11 or - KF|12 => - return (c - KF) + P9KF; - } - return c; -} - -cwd(): string -{ - return sys->fd2path(sys->open(".", Sys->OREAD)); -} - -# from string.b, waiting for declaration to be uncommented. 
-quotedc(argv: list of string, cl: string): string -{ - s := ""; - while (argv != nil) { - arg := hd argv; - for (i := 0; i < len arg; i++) { - c := arg[i]; - if (c == ' ' || c == '\t' || c == '\n' || c == '\'' || in(c, cl)) - break; - } - if (i < len arg || arg == nil) { - s += "'" + arg[0:i]; - for (; i < len arg; i++) { - if (arg[i] == '\'') - s[len s] = '\''; - s[len s] = arg[i]; - } - s[len s] = '\''; - } else - s += arg; - if (tl argv != nil) - s[len s] = ' '; - argv = tl argv; - } - return s; -} - -in(c: int, s: string): int -{ - n := len s; - if(n == 0) - return 0; - ans := 0; - negate := 0; - if(s[0] == '^') { - negate = 1; - s = s[1:]; - n--; - } - for(i := 0; i < n; i++) { - if(s[i] == '-' && i > 0 && i < n-1) { - if(c >= s[i-1] && c <= s[i+1]) { - ans = 1; - break; - } - i++; - } - else - if(c == s[i]) { - ans = 1; - break; - } - } - if(negate) - ans = !ans; - - # just to showcase labels -skip: - return ans; -} - -bglong(d: array of byte, i: int): int -{ - return int d[i] | (int d[i+1]<<8) | (int d[i+2]<<16) | (int d[i+3]<<24); -} diff --git a/tests/examplefiles/linecontinuation.py b/tests/examplefiles/linecontinuation.py deleted file mode 100644 index 2a41c31c..00000000 --- a/tests/examplefiles/linecontinuation.py +++ /dev/null @@ -1,47 +0,0 @@ -apple.filter(x, y) -apple.\ - filter(x, y) - -1 \ - . \ - __str__ - -from os import path -from \ - os \ - import \ - path - -import os.path as something - -import \ - os.path \ - as \ - something - -class \ - Spam: - pass - -class Spam: pass - -class Spam(object): - pass - -class \ - Spam \ - ( - object - ) \ - : - pass - - -def \ - spam \ - ( \ - ) \ - : \ - pass - - diff --git a/tests/examplefiles/livescript-demo.ls b/tests/examplefiles/livescript-demo.ls deleted file mode 100644 index 03cbcc99..00000000 --- a/tests/examplefiles/livescript-demo.ls +++ /dev/null @@ -1,43 +0,0 @@ -a = -> [1 to 50] -const b = --> [2 til 5] -var c = ~~> 10_000_000km * 500ms - 16~ff / 32~lol -e = (a) -> (b) ~> (c) --> (d, e) ~~> <[list of words]> -dashes-identifiers = -> - a - a b -- c 1-1 1- -1 a- a a -a -underscores_i$d = -> - /regexp1/ - //regexp2//g - 'strings' and "strings" and \strings and \#$-"\'strings - -another-word-list = <[ more words ]> - -[2 til 10] - |> map (* 2) - |> filter (> 5) - |> fold (+) - -obj = - prop1: 1 - prop2: 2 - -class Class extends Anc-est-or - (args) -> - <- # Comment - <~ /* Comment */ - void undefined yes no on off - a.void b.undefined c.off d.if f.no g.not - avoid bundefined coff dif fno gnot - "inter #{2 + 2} #variable" - '''HELLO 'world' ''' - -copy = (from, to, callback) --> - error, data <- read file - return callback error if error? - error <~ write file, data - return callback error if error? 
- callback() - -take(n, [x, ...xs]:list) = - | n <= 0 => [] - | empty list => [] - | otherwise => [x] +++ take n - 1, xs diff --git a/tests/examplefiles/logos_example.xm b/tests/examplefiles/logos_example.xm deleted file mode 100644 index 39753e23..00000000 --- a/tests/examplefiles/logos_example.xm +++ /dev/null @@ -1,28 +0,0 @@ -%hook ABC -- (id)a:(B)b { - %log; - return %orig(nil); -} -%end - -%subclass DEF: NSObject -- (id)init { - [%c(RuntimeAccessibleClass) alloc]; - return nil; -} -%end - -%group OptionalHooks -%hook ABC -- (void)release { - [self retain]; - %orig; -} -%end -%end - -%ctor { - %init; - if(OptionalCondition) - %init(OptionalHooks); -} diff --git a/tests/examplefiles/ltmain.sh b/tests/examplefiles/ltmain.sh deleted file mode 100644 index 5b5f845f..00000000 --- a/tests/examplefiles/ltmain.sh +++ /dev/null @@ -1,2849 +0,0 @@ -# ltmain.sh - Provide generalized library-building support services. -# NOTE: Changing this file will not affect anything until you rerun configure. -# -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 -# Free Software Foundation, Inc. -# Originally by Gordon Matzigkeit , 1996 -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -basename="s,^.*/,,g" - -# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh -# is ksh but when the shell is invoked as "sh" and the current value of -# the _XPG environment variable is not equal to 1 (one), the special -# positional parameter $0, within a function call, is the name of the -# function. -progpath="$0" - -# define SED for historic ltconfig's generated by Libtool 1.3 -test -z "$SED" && SED=sed - -# The name of this program: -progname=`echo "$progpath" | $SED $basename` -modename="$progname" - -# Global variables: -EXIT_SUCCESS=0 -EXIT_FAILURE=1 - -PROGRAM=ltmain.sh -PACKAGE=libtool -VERSION=1.5.22 -TIMESTAMP=" (1.1220.2.365 2005/12/18 22:14:06)" - -# See if we are running on zsh, and set the options which allow our -# commands through without removal of \ escapes. -if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST -fi -# Same for EGREP, and just to be sure, do LTCC as well -if test "X$EGREP" = X ; then - EGREP=egrep -fi -if test "X$LTCC" = X ; then - LTCC=${CC-gcc} -fi - -# Check that we have a working $echo. -if test "X$1" = X--no-reexec; then - # Discard the --no-reexec flag, and continue. - shift -elif test "X$1" = X--fallback-echo; then - # Avoid inline document here, it may be left over - : -elif test "X`($echo '\t') 2>/dev/null`" = 'X\t'; then - # Yippee, $echo works! 
- : -else - # Restart under the correct shell, and then maybe $echo will work. - exec $SHELL "$progpath" --no-reexec ${1+"$@"} -fi - -if test "X$1" = X--fallback-echo; then - # used as fallback echo - shift - cat <&2 - $echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2 - exit $EXIT_FAILURE -fi - -# Global variables. -mode=$default_mode -nonopt= -prev= -prevopt= -run= -show="$echo" -show_help= -execute_dlfiles= -duplicate_deps=no -preserve_args= -lo2o="s/\\.lo\$/.${objext}/" -o2lo="s/\\.${objext}\$/.lo/" - -if test -z "$max_cmd_len"; then - i=0 - testring="ABCD" - new_result= - - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. - while (test "X"`$SHELL $0 --fallback-echo "X$testring" 2>/dev/null` \ - = "XX$testring") >/dev/null 2>&1 && - new_result=`expr "X$testring" : ".*" 2>&1` && - max_cmd_len="$new_result" && - test "$i" != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - testring="$testring$testring" - done - testring= - # Add a significant safety factor because C++ compilers can tack on massive - # amounts of additional arguments before passing them to the linker. - # It appears as though 1/2 is a usable value. - max_cmd_len=`expr $max_cmd_len \/ 2` -fi - -##################################### -# Shell function definitions: -# This seems to be the best place for them - -# func_mktempdir [string] -# Make a temporary directory that won't clash with other running -# libtool processes, and avoids race conditions if possible. If -# given, STRING is the basename for that directory. -func_mktempdir () -{ - my_template="${TMPDIR-/tmp}/${1-$progname}" - - if test "$run" = ":"; then - # Return a directory name, but don't create it in dry-run mode - my_tmpdir="${my_template}-$$" - else - - # If mktemp works, use that first and foremost - my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` - - if test ! -d "$my_tmpdir"; then - # Failing that, at least try and use $RANDOM to avoid a race - my_tmpdir="${my_template}-${RANDOM-0}$$" - - save_mktempdir_umask=`umask` - umask 0077 - $mkdir "$my_tmpdir" - umask $save_mktempdir_umask - fi - - # If we're not in dry-run mode, bomb out on failure - test -d "$my_tmpdir" || { - $echo "cannot create temporary directory \`$my_tmpdir'" 1>&2 - exit $EXIT_FAILURE - } - fi - - $echo "X$my_tmpdir" | $Xsed -} - - -# func_win32_libid arg -# return the library type of file 'arg' -# -# Need a lot of goo to handle *both* DLLs and import libs -# Has to be a shell function in order to 'eat' the argument -# that is supplied when $file_magic_command is called. -func_win32_libid () -{ - win32_libid_type="unknown" - win32_fileres=`file -L $1 2>/dev/null` - case $win32_fileres in - *ar\ archive\ import\ library*) # definitely import - win32_libid_type="x86 archive import" - ;; - *ar\ archive*) # could be an import, or static - if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | \ - $EGREP -e 'file format pe-i386(.*architecture: i386)?' >/dev/null ; then - win32_nmres=`eval $NM -f posix -A $1 | \ - $SED -n -e '1,100{/ I /{s,.*,import,;p;q;};}'` - case $win32_nmres in - import*) win32_libid_type="x86 archive import";; - *) win32_libid_type="x86 archive static";; - esac - fi - ;; - *DLL*) - win32_libid_type="x86 DLL" - ;; - *executable*) # but shell scripts are "executable" too... 
- case $win32_fileres in - *MS\ Windows\ PE\ Intel*) - win32_libid_type="x86 DLL" - ;; - esac - ;; - esac - $echo $win32_libid_type -} - - -# func_infer_tag arg -# Infer tagged configuration to use if any are available and -# if one wasn't chosen via the "--tag" command line option. -# Only attempt this if the compiler in the base compile -# command doesn't match the default compiler. -# arg is usually of the form 'gcc ...' -func_infer_tag () -{ - if test -n "$available_tags" && test -z "$tagname"; then - CC_quoted= - for arg in $CC; do - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - CC_quoted="$CC_quoted $arg" - done - case $@ in - # Blanks in the command may have been stripped by the calling shell, - # but not from the CC environment variable when configure was run. - " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) ;; - # Blanks at the start of $base_compile will cause this to fail - # if we don't check for them as well. - *) - for z in $available_tags; do - if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then - # Evaluate the configuration. - eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" - CC_quoted= - for arg in $CC; do - # Double-quote args containing other shell metacharacters. - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - CC_quoted="$CC_quoted $arg" - done - # user sometimes does CC=-gcc so we need to match that to 'gcc' - trimedcc=`echo ${CC} | $SED -e "s/${host}-//g"` - # and sometimes libtool has CC=-gcc but user does CC=gcc - extendcc=${host}-${CC} - # and sometimes libtool has CC=-gcc but user has CC=-gcc - # (Gentoo-specific hack because we always export $CHOST) - mungedcc=${CHOST-${host}}-${trimedcc} - case "$@ " in - "cc "* | " cc "* | "${host}-cc "* | " ${host}-cc "*|\ - "gcc "* | " gcc "* | "${host}-gcc "* | " ${host}-gcc "*) - tagname=CC - break ;; - "$trimedcc "* | " $trimedcc "* | "`$echo $trimedcc` "* | " `$echo $trimedcc` "*|\ - "$extendcc "* | " $extendcc "* | "`$echo $extendcc` "* | " `$echo $extendcc` "*|\ - "$mungedcc "* | " $mungedcc "* | "`$echo $mungedcc` "* | " `$echo $mungedcc` "*|\ - " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) - # The compiler in the base compile command matches - # the one in the tagged configuration. - # Assume this is the tagged configuration we want. - tagname=$z - break - ;; - esac - fi - done - # If $tagname still isn't set, then no tagged configuration - # was found and let the user know that the "--tag" command - # line option must be used. - if test -z "$tagname"; then - $echo "$modename: unable to infer tagged configuration" - $echo "$modename: specify a tag with \`--tag'" 1>&2 - exit $EXIT_FAILURE -# else -# $echo "$modename: using $tagname tagged configuration" - fi - ;; - esac - fi -} - - -# func_extract_an_archive dir oldlib -func_extract_an_archive () -{ - f_ex_an_ar_dir="$1"; shift - f_ex_an_ar_oldlib="$1" - - $show "(cd $f_ex_an_ar_dir && $AR x $f_ex_an_ar_oldlib)" - $run eval "(cd \$f_ex_an_ar_dir && $AR x \$f_ex_an_ar_oldlib)" || exit $? 
- if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then - : - else - $echo "$modename: ERROR: object name conflicts: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" 1>&2 - exit $EXIT_FAILURE - fi -} - -# func_extract_archives gentop oldlib ... -func_extract_archives () -{ - my_gentop="$1"; shift - my_oldlibs=${1+"$@"} - my_oldobjs="" - my_xlib="" - my_xabs="" - my_xdir="" - my_status="" - - $show "${rm}r $my_gentop" - $run ${rm}r "$my_gentop" - $show "$mkdir $my_gentop" - $run $mkdir "$my_gentop" - my_status=$? - if test "$my_status" -ne 0 && test ! -d "$my_gentop"; then - exit $my_status - fi - - for my_xlib in $my_oldlibs; do - # Extract the objects. - case $my_xlib in - [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; - *) my_xabs=`pwd`"/$my_xlib" ;; - esac - my_xlib=`$echo "X$my_xlib" | $Xsed -e 's%^.*/%%'` - my_xdir="$my_gentop/$my_xlib" - - $show "${rm}r $my_xdir" - $run ${rm}r "$my_xdir" - $show "$mkdir $my_xdir" - $run $mkdir "$my_xdir" - exit_status=$? - if test "$exit_status" -ne 0 && test ! -d "$my_xdir"; then - exit $exit_status - fi - case $host in - *-darwin*) - $show "Extracting $my_xabs" - # Do not bother doing anything if just a dry run - if test -z "$run"; then - darwin_orig_dir=`pwd` - cd $my_xdir || exit $? - darwin_archive=$my_xabs - darwin_curdir=`pwd` - darwin_base_archive=`$echo "X$darwin_archive" | $Xsed -e 's%^.*/%%'` - darwin_arches=`lipo -info "$darwin_archive" 2>/dev/null | $EGREP Architectures 2>/dev/null` - if test -n "$darwin_arches"; then - darwin_arches=`echo "$darwin_arches" | $SED -e 's/.*are://'` - darwin_arch= - $show "$darwin_base_archive has multiple architectures $darwin_arches" - for darwin_arch in $darwin_arches ; do - mkdir -p "unfat-$$/${darwin_base_archive}-${darwin_arch}" - lipo -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" - cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" - func_extract_an_archive "`pwd`" "${darwin_base_archive}" - cd "$darwin_curdir" - $rm "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" - done # $darwin_arches - ## Okay now we have a bunch of thin objects, gotta fatten them up :) - darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print| xargs basename | sort -u | $NL2SP` - darwin_file= - darwin_files= - for darwin_file in $darwin_filelist; do - darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` - lipo -create -output "$darwin_file" $darwin_files - done # $darwin_filelist - ${rm}r unfat-$$ - cd "$darwin_orig_dir" - else - cd "$darwin_orig_dir" - func_extract_an_archive "$my_xdir" "$my_xabs" - fi # $darwin_arches - fi # $run - ;; - *) - func_extract_an_archive "$my_xdir" "$my_xabs" - ;; - esac - my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` - done - func_extract_archives_result="$my_oldobjs" -} -# End of Shell function definitions -##################################### - -# Darwin sucks -eval std_shrext=\"$shrext_cmds\" - -disable_libs=no - -# Parse our command line options once, thoroughly. -while test "$#" -gt 0 -do - arg="$1" - shift - - case $arg in - -*=*) optarg=`$echo "X$arg" | $Xsed -e 's/[-_a-zA-Z0-9]*=//'` ;; - *) optarg= ;; - esac - - # If the previous option needs an argument, assign it. 
- if test -n "$prev"; then - case $prev in - execute_dlfiles) - execute_dlfiles="$execute_dlfiles $arg" - ;; - tag) - tagname="$arg" - preserve_args="${preserve_args}=$arg" - - # Check whether tagname contains only valid characters - case $tagname in - *[!-_A-Za-z0-9,/]*) - $echo "$progname: invalid tag name: $tagname" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - case $tagname in - CC) - # Don't test for the "default" C tag, as we know, it's there, but - # not specially marked. - ;; - *) - if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "$progpath" > /dev/null; then - taglist="$taglist $tagname" - # Evaluate the configuration. - eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$tagname'$/,/^# ### END LIBTOOL TAG CONFIG: '$tagname'$/p' < $progpath`" - else - $echo "$progname: ignoring unknown tag $tagname" 1>&2 - fi - ;; - esac - ;; - *) - eval "$prev=\$arg" - ;; - esac - - prev= - prevopt= - continue - fi - - # Have we seen a non-optional argument yet? - case $arg in - --help) - show_help=yes - ;; - - --version) - $echo "$PROGRAM (GNU $PACKAGE) $VERSION$TIMESTAMP" - $echo - $echo "Copyright (C) 2005 Free Software Foundation, Inc." - $echo "This is free software; see the source for copying conditions. There is NO" - $echo "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - exit $? - ;; - - --config) - ${SED} -e '1,/^# ### BEGIN LIBTOOL CONFIG/d' -e '/^# ### END LIBTOOL CONFIG/,$d' $progpath - # Now print the configurations for the tags. - for tagname in $taglist; do - ${SED} -n -e "/^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$/,/^# ### END LIBTOOL TAG CONFIG: $tagname$/p" < "$progpath" - done - exit $? - ;; - - --debug) - $echo "$progname: enabling shell trace mode" - set -x - preserve_args="$preserve_args $arg" - ;; - - --dry-run | -n) - run=: - ;; - - --features) - $echo "host: $host" - if test "$build_libtool_libs" = yes; then - $echo "enable shared libraries" - else - $echo "disable shared libraries" - fi - if test "$build_old_libs" = yes; then - $echo "enable static libraries" - else - $echo "disable static libraries" - fi - exit $? - ;; - - --finish) mode="finish" ;; - - --mode) prevopt="--mode" prev=mode ;; - --mode=*) mode="$optarg" ;; - - --preserve-dup-deps) duplicate_deps="yes" ;; - - --quiet | --silent) - show=: - preserve_args="$preserve_args $arg" - ;; - - --tag) - prevopt="--tag" - prev=tag - preserve_args="$preserve_args --tag" - ;; - --tag=*) - set tag "$optarg" ${1+"$@"} - shift - prev=tag - preserve_args="$preserve_args --tag" - ;; - - -dlopen) - prevopt="-dlopen" - prev=execute_dlfiles - ;; - - -*) - $echo "$modename: unrecognized option \`$arg'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; - - *) - nonopt="$arg" - break - ;; - esac -done - -if test -n "$prevopt"; then - $echo "$modename: option \`$prevopt' requires an argument" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE -fi - -case $disable_libs in -no) - ;; -shared) - build_libtool_libs=no - build_old_libs=yes - ;; -static) - build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` - ;; -esac - -# If this variable is set in any of the actions, the command in it -# will be execed at the end. This prevents here-documents from being -# left over by shells. -exec_cmd= - -if test -z "$show_help"; then - - # Infer the operation mode. - if test -z "$mode"; then - $echo "*** Warning: inferring the mode of operation is deprecated." 1>&2 - $echo "*** Future versions of Libtool will require --mode=MODE be specified." 
1>&2 - case $nonopt in - *cc | cc* | *++ | gcc* | *-gcc* | g++* | xlc*) - mode=link - for arg - do - case $arg in - -c) - mode=compile - break - ;; - esac - done - ;; - *db | *dbx | *strace | *truss) - mode=execute - ;; - *install*|cp|mv) - mode=install - ;; - *rm) - mode=uninstall - ;; - *) - # If we have no mode, but dlfiles were specified, then do execute mode. - test -n "$execute_dlfiles" && mode=execute - - # Just use the default operation mode. - if test -z "$mode"; then - if test -n "$nonopt"; then - $echo "$modename: warning: cannot infer operation mode from \`$nonopt'" 1>&2 - else - $echo "$modename: warning: cannot infer operation mode without MODE-ARGS" 1>&2 - fi - fi - ;; - esac - fi - - # Only execute mode is allowed to have -dlopen flags. - if test -n "$execute_dlfiles" && test "$mode" != execute; then - $echo "$modename: unrecognized option \`-dlopen'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Change the help message to a mode-specific one. - generic_help="$help" - help="Try \`$modename --help --mode=$mode' for more information." - - # These modes are in order of execution frequency so that they run quickly. - case $mode in - # libtool compile mode - compile) - modename="$modename: compile" - # Get the compilation command and the source file. - base_compile= - srcfile="$nonopt" # always keep a non-empty value in "srcfile" - suppress_opt=yes - suppress_output= - arg_mode=normal - libobj= - later= - - for arg - do - case $arg_mode in - arg ) - # do not "continue". Instead, add this to base_compile - lastarg="$arg" - arg_mode=normal - ;; - - target ) - libobj="$arg" - arg_mode=normal - continue - ;; - - normal ) - # Accept any command-line options. - case $arg in - -o) - if test -n "$libobj" ; then - $echo "$modename: you cannot specify \`-o' more than once" 1>&2 - exit $EXIT_FAILURE - fi - arg_mode=target - continue - ;; - - -static | -prefer-pic | -prefer-non-pic) - later="$later $arg" - continue - ;; - - -no-suppress) - suppress_opt=no - continue - ;; - - -Xcompiler) - arg_mode=arg # the next one goes into the "base_compile" arg list - continue # The current "srcfile" will either be retained or - ;; # replaced later. I would guess that would be a bug. - - -Wc,*) - args=`$echo "X$arg" | $Xsed -e "s/^-Wc,//"` - lastarg= - save_ifs="$IFS"; IFS=',' - for arg in $args; do - IFS="$save_ifs" - - # Double-quote args containing other shell metacharacters. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, so we specify it separately. - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - lastarg="$lastarg $arg" - done - IFS="$save_ifs" - lastarg=`$echo "X$lastarg" | $Xsed -e "s/^ //"` - - # Add the arguments to base_compile. - base_compile="$base_compile $lastarg" - continue - ;; - - * ) - # Accept the current argument as the source file. - # The previous "srcfile" becomes the current argument. - # - lastarg="$srcfile" - srcfile="$arg" - ;; - esac # case $arg - ;; - esac # case $arg_mode - - # Aesthetically quote the previous argument. - lastarg=`$echo "X$lastarg" | $Xsed -e "$sed_quote_subst"` - - case $lastarg in - # Double-quote args containing other shell metacharacters. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, and some SunOS ksh mistreat backslash-escaping - # in scan sets (worked around with variable expansion), - # and furthermore cannot handle '|' '&' '(' ')' in scan sets - # at all, so we specify them separately. 
- *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - lastarg="\"$lastarg\"" - ;; - esac - - base_compile="$base_compile $lastarg" - done # for arg - - case $arg_mode in - arg) - $echo "$modename: you must specify an argument for -Xcompile" - exit $EXIT_FAILURE - ;; - target) - $echo "$modename: you must specify a target with \`-o'" 1>&2 - exit $EXIT_FAILURE - ;; - *) - # Get the name of the library object. - [ -z "$libobj" ] && libobj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%'` - ;; - esac - - # Recognize several different file suffixes. - # If the user specifies -o file.o, it is replaced with file.lo - xform='[cCFSifmso]' - case $libobj in - *.ada) xform=ada ;; - *.adb) xform=adb ;; - *.ads) xform=ads ;; - *.asm) xform=asm ;; - *.c++) xform=c++ ;; - *.cc) xform=cc ;; - *.ii) xform=ii ;; - *.class) xform=class ;; - *.cpp) xform=cpp ;; - *.cxx) xform=cxx ;; - *.f90) xform=f90 ;; - *.for) xform=for ;; - *.java) xform=java ;; - esac - - libobj=`$echo "X$libobj" | $Xsed -e "s/\.$xform$/.lo/"` - - case $libobj in - *.lo) obj=`$echo "X$libobj" | $Xsed -e "$lo2o"` ;; - *) - $echo "$modename: cannot determine name of library object from \`$libobj'" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - func_infer_tag $base_compile - - for arg in $later; do - case $arg in - -static) - build_old_libs=yes - continue - ;; - - -prefer-pic) - pic_mode=yes - continue - ;; - - -prefer-non-pic) - pic_mode=no - continue - ;; - esac - done - - qlibobj=`$echo "X$libobj" | $Xsed -e "$sed_quote_subst"` - case $qlibobj in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - qlibobj="\"$qlibobj\"" ;; - esac - test "X$libobj" != "X$qlibobj" \ - && $echo "X$libobj" | grep '[]~#^*{};<>?"'"'"' &()|`$[]' \ - && $echo "$modename: libobj name \`$libobj' may not contain shell special characters." - objname=`$echo "X$obj" | $Xsed -e 's%^.*/%%'` - xdir=`$echo "X$obj" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$obj"; then - xdir= - else - xdir=$xdir/ - fi - lobj=${xdir}$objdir/$objname - - if test -z "$base_compile"; then - $echo "$modename: you must specify a compilation command" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Delete any leftover library objects. 
- if test "$build_old_libs" = yes; then - removelist="$obj $lobj $libobj ${libobj}T" - else - removelist="$lobj $libobj ${libobj}T" - fi - - $run $rm $removelist - trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15 - - # On Cygwin there's no "real" PIC flag so we must build both object types - case $host_os in - cygwin* | mingw* | pw32* | os2*) - pic_mode=default - ;; - esac - if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then - # non-PIC code in shared libraries is not supported - pic_mode=default - fi - - # Calculate the filename of the output object if compiler does - # not support -o with -c - if test "$compiler_c_o" = no; then - output_obj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext} - lockfile="$output_obj.lock" - removelist="$removelist $output_obj $lockfile" - trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15 - else - output_obj= - need_locks=no - lockfile= - fi - - # Lock this critical section if it is needed - # We use this script file to make the link, it avoids creating a new file - if test "$need_locks" = yes; then - until $run ln "$srcfile" "$lockfile" 2>/dev/null; do - $show "Waiting for $lockfile to be removed" - sleep 2 - done - elif test "$need_locks" = warn; then - if test -f "$lockfile"; then - $echo "\ -*** ERROR, $lockfile exists and contains: -`cat $lockfile 2>/dev/null` - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $run $rm $removelist - exit $EXIT_FAILURE - fi - $echo "$srcfile" > "$lockfile" - fi - - if test -n "$fix_srcfile_path"; then - eval srcfile=\"$fix_srcfile_path\" - fi - qsrcfile=`$echo "X$srcfile" | $Xsed -e "$sed_quote_subst"` - case $qsrcfile in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - qsrcfile="\"$qsrcfile\"" ;; - esac - - $run $rm "$libobj" "${libobj}T" - - # Create a libtool object file (analogous to a ".la" file), - # but don't create it if we're doing a dry run. - test -z "$run" && cat > ${libobj}T </dev/null`" != "X$srcfile"; then - $echo "\ -*** ERROR, $lockfile contains: -`cat $lockfile 2>/dev/null` - -but it should contain: -$srcfile - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $run $rm $removelist - exit $EXIT_FAILURE - fi - - # Just move the object if needed, then go on to compile the next one - if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then - $show "$mv $output_obj $lobj" - if $run $mv $output_obj $lobj; then : - else - error=$? - $run $rm $removelist - exit $error - fi - fi - - # Append the name of the PIC object to the libtool object file. - test -z "$run" && cat >> ${libobj}T <> ${libobj}T </dev/null`" != "X$srcfile"; then - $echo "\ -*** ERROR, $lockfile contains: -`cat $lockfile 2>/dev/null` - -but it should contain: -$srcfile - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. 
If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $run $rm $removelist - exit $EXIT_FAILURE - fi - - # Just move the object if needed - if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then - $show "$mv $output_obj $obj" - if $run $mv $output_obj $obj; then : - else - error=$? - $run $rm $removelist - exit $error - fi - fi - - # Append the name of the non-PIC object the libtool object file. - # Only append if the libtool object file exists. - test -z "$run" && cat >> ${libobj}T <> ${libobj}T <&2 - fi - if test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=yes - else - if test -z "$pic_flag" && test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=built - fi - build_libtool_libs=no - build_old_libs=yes - break - ;; - esac - done - - # See if our shared archives depend on static archives. - test -n "$old_archive_from_new_cmds" && build_old_libs=yes - - # Go through the arguments, transforming them on the way. - while test "$#" -gt 0; do - arg="$1" - shift - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - qarg=\"`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`\" ### testsuite: skip nested quoting test - ;; - *) qarg=$arg ;; - esac - libtool_args="$libtool_args $qarg" - - # If the previous option needs an argument, assign it. - if test -n "$prev"; then - case $prev in - output) - compile_command="$compile_command @OUTPUT@" - finalize_command="$finalize_command @OUTPUT@" - ;; - esac - - case $prev in - dlfiles|dlprefiles) - if test "$preload" = no; then - # Add the symbol object into the linking commands. - compile_command="$compile_command @SYMFILE@" - finalize_command="$finalize_command @SYMFILE@" - preload=yes - fi - case $arg in - *.la | *.lo) ;; # We handle these cases below. - force) - if test "$dlself" = no; then - dlself=needless - export_dynamic=yes - fi - prev= - continue - ;; - self) - if test "$prev" = dlprefiles; then - dlself=yes - elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then - dlself=yes - else - dlself=needless - export_dynamic=yes - fi - prev= - continue - ;; - *) - if test "$prev" = dlfiles; then - dlfiles="$dlfiles $arg" - else - dlprefiles="$dlprefiles $arg" - fi - prev= - continue - ;; - esac - ;; - expsyms) - export_symbols="$arg" - if test ! -f "$arg"; then - $echo "$modename: symbol file \`$arg' does not exist" - exit $EXIT_FAILURE - fi - prev= - continue - ;; - expsyms_regex) - export_symbols_regex="$arg" - prev= - continue - ;; - inst_prefix) - inst_prefix_dir="$arg" - prev= - continue - ;; - precious_regex) - precious_files_regex="$arg" - prev= - continue - ;; - release) - release="-$arg" - prev= - continue - ;; - objectlist) - if test -f "$arg"; then - save_arg=$arg - moreargs= - for fil in `cat $save_arg` - do -# moreargs="$moreargs $fil" - arg=$fil - # A libtool-controlled object. - - # Check to see that this really is a libtool object. - if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - pic_object= - non_pic_object= - - # Read the .lo file - # If there is no directory component, then add one. - case $arg in - */* | *\\*) . $arg ;; - *) . 
./$arg ;; - esac - - if test -z "$pic_object" || \ - test -z "$non_pic_object" || - test "$pic_object" = none && \ - test "$non_pic_object" = none; then - $echo "$modename: cannot find name of object for \`$arg'" 1>&2 - exit $EXIT_FAILURE - fi - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. - pic_object="$xdir$pic_object" - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - dlfiles="$dlfiles $pic_object" - prev= - continue - else - # If libtool objects are unsupported, then we need to preload. - prev=dlprefiles - fi - fi - - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. - dlprefiles="$dlprefiles $pic_object" - prev= - fi - - # A PIC object. - libobjs="$libobjs $pic_object" - arg="$pic_object" - fi - - # Non-PIC object. - if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. - non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - non_pic_objects="$non_pic_objects $non_pic_object" - if test -z "$pic_object" || test "$pic_object" = none ; then - arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. - non_pic_object="$pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - else - # Only an error if not doing a dry-run. - if test -z "$run"; then - $echo "$modename: \`$arg' is not a valid libtool object" 1>&2 - exit $EXIT_FAILURE - else - # Dry-run case. - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"` - non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"` - libobjs="$libobjs $pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - fi - done - else - $echo "$modename: link input file \`$save_arg' does not exist" - exit $EXIT_FAILURE - fi - arg=$save_arg - prev= - continue - ;; - rpath | xrpath) - # We need an absolute path. 
- case $arg in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - $echo "$modename: only absolute run-paths are allowed" 1>&2 - exit $EXIT_FAILURE - ;; - esac - if test "$prev" = rpath; then - case "$rpath " in - *" $arg "*) ;; - *) rpath="$rpath $arg" ;; - esac - else - case "$xrpath " in - *" $arg "*) ;; - *) xrpath="$xrpath $arg" ;; - esac - fi - prev= - continue - ;; - xcompiler) - compiler_flags="$compiler_flags $qarg" - prev= - compile_command="$compile_command $qarg" - finalize_command="$finalize_command $qarg" - continue - ;; - xlinker) - linker_flags="$linker_flags $qarg" - compiler_flags="$compiler_flags $wl$qarg" - prev= - compile_command="$compile_command $wl$qarg" - finalize_command="$finalize_command $wl$qarg" - continue - ;; - xcclinker) - linker_flags="$linker_flags $qarg" - compiler_flags="$compiler_flags $qarg" - prev= - compile_command="$compile_command $qarg" - finalize_command="$finalize_command $qarg" - continue - ;; - shrext) - shrext_cmds="$arg" - prev= - continue - ;; - darwin_framework|darwin_framework_skip) - test "$prev" = "darwin_framework" && compiler_flags="$compiler_flags $arg" - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - prev= - continue - ;; - *) - eval "$prev=\"\$arg\"" - prev= - continue - ;; - esac - fi # test -n "$prev" - - prevarg="$arg" - - case $arg in - -all-static) - if test -n "$link_static_flag"; then - compile_command="$compile_command $link_static_flag" - finalize_command="$finalize_command $link_static_flag" - fi - continue - ;; - - -allow-undefined) - # FIXME: remove this flag sometime in the future. - $echo "$modename: \`-allow-undefined' is deprecated because it is the default" 1>&2 - continue - ;; - - -avoid-version) - avoid_version=yes - continue - ;; - - -dlopen) - prev=dlfiles - continue - ;; - - -dlpreopen) - prev=dlprefiles - continue - ;; - - -export-dynamic) - export_dynamic=yes - continue - ;; - - -export-symbols | -export-symbols-regex) - if test -n "$export_symbols" || test -n "$export_symbols_regex"; then - $echo "$modename: more than one -exported-symbols argument is not allowed" - exit $EXIT_FAILURE - fi - if test "X$arg" = "X-export-symbols"; then - prev=expsyms - else - prev=expsyms_regex - fi - continue - ;; - - -framework|-arch|-isysroot) - case " $CC " in - *" ${arg} ${1} "* | *" ${arg} ${1} "*) - prev=darwin_framework_skip ;; - *) compiler_flags="$compiler_flags $arg" - prev=darwin_framework ;; - esac - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - continue - ;; - - -inst-prefix-dir) - prev=inst_prefix - continue - ;; - - # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* - # so, if we see these flags be careful not to treat them like -L - -L[A-Z][A-Z]*:*) - case $with_gcc/$host in - no/*-*-irix* | /*-*-irix*) - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - ;; - esac - continue - ;; - - -L*) - dir=`$echo "X$arg" | $Xsed -e 's/^-L//'` - # We need an absolute path. 
- case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - absdir=`cd "$dir" && pwd` - if test -z "$absdir"; then - $echo "$modename: cannot determine absolute directory name of \`$dir'" 1>&2 - absdir="$dir" - notinst_path="$notinst_path $dir" - fi - dir="$absdir" - ;; - esac - case "$deplibs " in - *" -L$dir "*) ;; - *) - deplibs="$deplibs -L$dir" - lib_search_path="$lib_search_path $dir" - ;; - esac - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) - testbindir=`$echo "X$dir" | $Xsed -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$dir:"*) ;; - *) dllsearchpath="$dllsearchpath:$dir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - *) dllsearchpath="$dllsearchpath:$testbindir";; - esac - ;; - esac - continue - ;; - - -l*) - if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos*) - # These systems don't actually have a C or math library (as such) - continue - ;; - *-*-os2*) - # These systems don't actually have a C library (as such) - test "X$arg" = "X-lc" && continue - ;; - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. - test "X$arg" = "X-lc" && continue - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C and math libraries are in the System framework - deplibs="$deplibs -framework System" - continue - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype - test "X$arg" = "X-lc" && continue - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work - test "X$arg" = "X-lc" && continue - ;; - esac - elif test "X$arg" = "X-lc_r"; then - case $host in - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc_r directly, use -pthread flag. - continue - ;; - esac - fi - deplibs="$deplibs $arg" - continue - ;; - - # Tru64 UNIX uses -model [arg] to determine the layout of C++ - # classes, name mangling, and exception handling. - -model) - compile_command="$compile_command $arg" - compiler_flags="$compiler_flags $arg" - finalize_command="$finalize_command $arg" - prev=xcompiler - continue - ;; - - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe) - compiler_flags="$compiler_flags $arg" - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - continue - ;; - - -module) - module=yes - continue - ;; - - # -64, -mips[0-9] enable 64-bit mode on the SGI compiler - # -r[0-9][0-9]* specifies the processor on the SGI compiler - # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler - # +DA*, +DD* enable 64-bit mode on the HP compiler - # -q* pass through compiler args for the IBM compiler - # -m* pass through architecture-specific compiler args for GCC - # -m*, -t[45]*, -txscale* pass through architecture-specific - # compiler args for GCC - # -pg pass through profiling flag for GCC - # @file GCC response files - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*|-pg| \ - -t[45]*|-txscale*|@*) - - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. 
- arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - compiler_flags="$compiler_flags $arg" - continue - ;; - - -shrext) - prev=shrext - continue - ;; - - -no-fast-install) - fast_install=no - continue - ;; - - -no-install) - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) - # The PATH hackery in wrapper scripts is required on Windows - # in order for the loader to find any dlls it needs. - $echo "$modename: warning: \`-no-install' is ignored for $host" 1>&2 - $echo "$modename: warning: assuming \`-no-fast-install' instead" 1>&2 - fast_install=no - ;; - *) no_install=yes ;; - esac - continue - ;; - - -no-undefined) - allow_undefined=no - continue - ;; - - -objectlist) - prev=objectlist - continue - ;; - - -o) prev=output ;; - - -precious-files-regex) - prev=precious_regex - continue - ;; - - -release) - prev=release - continue - ;; - - -rpath) - prev=rpath - continue - ;; - - -R) - prev=xrpath - continue - ;; - - -R*) - dir=`$echo "X$arg" | $Xsed -e 's/^-R//'` - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - $echo "$modename: only absolute run-paths are allowed" 1>&2 - exit $EXIT_FAILURE - ;; - esac - case "$xrpath " in - *" $dir "*) ;; - *) xrpath="$xrpath $dir" ;; - esac - continue - ;; - - -static) - # The effects of -static are defined in a previous loop. - # We used to do the same as -all-static on platforms that - # didn't have a PIC flag, but the assumption that the effects - # would be equivalent was wrong. It would break on at least - # Digital Unix and AIX. - continue - ;; - - -thread-safe) - thread_safe=yes - continue - ;; - - -version-info) - prev=vinfo - continue - ;; - -version-number) - prev=vinfo - vinfo_number=yes - continue - ;; - - -Wc,*) - args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wc,//'` - arg= - save_ifs="$IFS"; IFS=',' - for flag in $args; do - IFS="$save_ifs" - case $flag in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - flag="\"$flag\"" - ;; - esac - arg="$arg $wl$flag" - compiler_flags="$compiler_flags $flag" - done - IFS="$save_ifs" - arg=`$echo "X$arg" | $Xsed -e "s/^ //"` - ;; - - -Wl,*) - args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'` - arg= - save_ifs="$IFS"; IFS=',' - for flag in $args; do - IFS="$save_ifs" - case $flag in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - flag="\"$flag\"" - ;; - esac - arg="$arg $wl$flag" - compiler_flags="$compiler_flags $wl$flag" - linker_flags="$linker_flags $flag" - done - IFS="$save_ifs" - arg=`$echo "X$arg" | $Xsed -e "s/^ //"` - ;; - - -Xcompiler) - prev=xcompiler - continue - ;; - - -Xlinker) - prev=xlinker - continue - ;; - - -XCClinker) - prev=xcclinker - continue - ;; - - # Some other compiler flag. - -* | +*) - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. - arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - ;; - - *.$objext) - # A standard object. - objs="$objs $arg" - ;; - - *.lo) - # A libtool-controlled object. - - # Check to see that this really is a libtool object. - if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - pic_object= - non_pic_object= - - # Read the .lo file - # If there is no directory component, then add one. 
- case $arg in - */* | *\\*) . $arg ;; - *) . ./$arg ;; - esac - - if test -z "$pic_object" || \ - test -z "$non_pic_object" || - test "$pic_object" = none && \ - test "$non_pic_object" = none; then - $echo "$modename: cannot find name of object for \`$arg'" 1>&2 - exit $EXIT_FAILURE - fi - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. - pic_object="$xdir$pic_object" - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - dlfiles="$dlfiles $pic_object" - prev= - continue - else - # If libtool objects are unsupported, then we need to preload. - prev=dlprefiles - fi - fi - - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. - dlprefiles="$dlprefiles $pic_object" - prev= - fi - - # A PIC object. - libobjs="$libobjs $pic_object" - arg="$pic_object" - fi - - # Non-PIC object. - if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. - non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - non_pic_objects="$non_pic_objects $non_pic_object" - if test -z "$pic_object" || test "$pic_object" = none ; then - arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. - non_pic_object="$pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - else - # Only an error if not doing a dry-run. - if test -z "$run"; then - $echo "$modename: \`$arg' is not a valid libtool object" 1>&2 - exit $EXIT_FAILURE - else - # Dry-run case. - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"` - non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"` - libobjs="$libobjs $pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - fi - ;; - - *.$libext) - # An archive. - deplibs="$deplibs $arg" - old_deplibs="$old_deplibs $arg" - continue - ;; - - *.la) - # A libtool-controlled library. - - if test "$prev" = dlfiles; then - # This library was specified with -dlopen. - dlfiles="$dlfiles $arg" - prev= - elif test "$prev" = dlprefiles; then - # The library was specified with -dlpreopen. - dlprefiles="$dlprefiles $arg" - prev= - else - deplibs="$deplibs $arg" - fi - continue - ;; - - # Some other compiler argument. - *) - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. - arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - ;; - esac # arg - - # Now actually substitute the argument into the commands. 
- if test -n "$arg"; then - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - fi - done # argument parsing loop - - if test -n "$prev"; then - $echo "$modename: the \`$prevarg' option requires an argument" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then - eval arg=\"$export_dynamic_flag_spec\" - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - fi - - oldlibs= - # calculate the name of the file, without its directory - outputname=`$echo "X$output" | $Xsed -e 's%^.*/%%'` - libobjs_save="$libobjs" - - if test -n "$shlibpath_var"; then - # get the directories listed in $shlibpath_var - eval shlib_search_path=\`\$echo \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\` - else - shlib_search_path= - fi - eval sys_lib_search_path=\"$sys_lib_search_path_spec\" - eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" - - output_objdir=`$echo "X$output" | $Xsed -e 's%/[^/]*$%%'` - if test "X$output_objdir" = "X$output"; then - output_objdir="$objdir" - else - output_objdir="$output_objdir/$objdir" - fi - # Create the object directory. - if test ! -d "$output_objdir"; then - $show "$mkdir $output_objdir" - $run $mkdir $output_objdir - exit_status=$? - if test "$exit_status" -ne 0 && test ! -d "$output_objdir"; then - exit $exit_status - fi - fi - - # Determine the type of output - case $output in - "") - $echo "$modename: you must specify an output file" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; - *.$libext) linkmode=oldlib ;; - *.lo | *.$objext) linkmode=obj ;; - *.la) linkmode=lib ;; - *) linkmode=prog ;; # Anything else should be a program. - esac - - case $host in - *cygwin* | *mingw* | *pw32*) - # don't eliminate duplications in $postdeps and $predeps - duplicate_compiler_generated_deps=yes - ;; - *) - duplicate_compiler_generated_deps=$duplicate_deps - ;; - esac - specialdeplibs= - - libs= - # Find all interdependent deplibs by searching for libraries - # that are linked more than once (e.g. -la -lb -la) - for deplib in $deplibs; do - if test "X$duplicate_deps" = "Xyes" ; then - case "$libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - libs="$libs $deplib" - done - - if test "$linkmode" = lib; then - libs="$predeps $libs $compiler_lib_search_path $postdeps" - - # Compute libraries that are listed more than once in $predeps - # $postdeps and mark them as special (i.e., whose duplicates are - # not to be eliminated). 
- pre_post_deps= - if test "X$duplicate_compiler_generated_deps" = "Xyes" ; then - for pre_post_dep in $predeps $postdeps; do - case "$pre_post_deps " in - *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; - esac - pre_post_deps="$pre_post_deps $pre_post_dep" - done - fi - pre_post_deps= - fi - - deplibs= - newdependency_libs= - newlib_search_path= - need_relink=no # whether we're linking any uninstalled libtool libraries - notinst_deplibs= # not-installed libtool libraries - case $linkmode in - lib) - passes="conv link" - for file in $dlfiles $dlprefiles; do - case $file in - *.la) ;; - *) - $echo "$modename: libraries can \`-dlopen' only libtool libraries: $file" 1>&2 - exit $EXIT_FAILURE - ;; - esac - done - ;; - prog) - compile_deplibs= - finalize_deplibs= - alldeplibs=no - newdlfiles= - newdlprefiles= - passes="conv scan dlopen dlpreopen link" - ;; - *) passes="conv" - ;; - esac - for pass in $passes; do - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan"; then - libs="$deplibs" - deplibs= - fi - if test "$linkmode" = prog; then - case $pass in - dlopen) libs="$dlfiles" ;; - dlpreopen) libs="$dlprefiles" ;; - link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; - esac - fi - if test "$pass" = dlopen; then - # Collect dlpreopened libraries - save_deplibs="$deplibs" - deplibs= - fi - for deplib in $libs; do - lib= - found=no - case $deplib in - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe) - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - compiler_flags="$compiler_flags $deplib" - fi - continue - ;; - -l*) - if test "$linkmode" != lib && test "$linkmode" != prog; then - $echo "$modename: warning: \`-l' is ignored for archives/objects" 1>&2 - continue - fi - name=`$echo "X$deplib" | $Xsed -e 's/^-l//'` - for searchdir in $newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path; do - for search_ext in .la $std_shrext .so .a; do - # Search the libtool library - lib="$searchdir/lib${name}${search_ext}" - if test -f "$lib"; then - if test "$search_ext" = ".la"; then - found=yes - else - found=no - fi - break 2 - fi - done - done - if test "$found" != yes; then - # deplib doesn't seem to be a libtool library - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - else # deplib is a libtool library - # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, - # We need to do some special things here, and not later. - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $deplib "*) - if (${SED} -e '2q' $lib | - grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - library_names= - old_library= - case $lib in - */* | *\\*) . $lib ;; - *) . ./$lib ;; - esac - for l in $old_library $library_names; do - ll="$l" - done - if test "X$ll" = "X$old_library" ; then # only static version available - found=no - ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'` - test "X$ladir" = "X$lib" && ladir="." 
- lib=$ladir/$old_library - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - fi - fi - ;; - *) ;; - esac - fi - fi - ;; # -l - -L*) - case $linkmode in - lib) - deplibs="$deplib $deplibs" - test "$pass" = conv && continue - newdependency_libs="$deplib $newdependency_libs" - newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'` - ;; - prog) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi - if test "$pass" = scan; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - fi - newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'` - ;; - *) - $echo "$modename: warning: \`-L' is ignored for archives/objects" 1>&2 - ;; - esac # linkmode - continue - ;; # -L - -R*) - if test "$pass" = link; then - dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'` - # Make sure the xrpath contains only unique directories. - case "$xrpath " in - *" $dir "*) ;; - *) xrpath="$xrpath $dir" ;; - esac - fi - deplibs="$deplib $deplibs" - continue - ;; - *.la) lib="$deplib" ;; - *.$libext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi - case $linkmode in - lib) - valid_a_lib=no - case $deplibs_check_method in - match_pattern*) - set dummy $deplibs_check_method - match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"` - if eval $echo \"$deplib\" 2>/dev/null \ - | $SED 10q \ - | $EGREP "$match_pattern_regex" > /dev/null; then - valid_a_lib=yes - fi - ;; - pass_all) - valid_a_lib=yes - ;; - esac - if test "$valid_a_lib" != yes; then - $echo - $echo "*** Warning: Trying to link with static lib archive $deplib." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. But I can only do this if you have a" - $echo "*** shared version of the library, which you do not appear to have" - $echo "*** because the file extensions .$libext of this argument makes me believe" - $echo "*** that it is just a static archive that I should not used here." - else - $echo - $echo "*** Warning: Linking the shared library $output against the" - $echo "*** static library $deplib is not portable!" - deplibs="$deplib $deplibs" - fi - continue - ;; - prog) - if test "$pass" != link; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - fi - continue - ;; - esac # linkmode - ;; # *.$libext - *.lo | *.$objext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - elif test "$linkmode" = prog; then - if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then - # If there is no dlopen support or we're linking statically, - # we need to preload. - newdlprefiles="$newdlprefiles $deplib" - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - newdlfiles="$newdlfiles $deplib" - fi - fi - continue - ;; - %DEPLIBS%) - alldeplibs=yes - continue - ;; - esac # case $deplib - if test "$found" = yes || test -f "$lib"; then : - else - $echo "$modename: cannot find the library \`$lib' or unhandled argument \`$deplib'" 1>&2 - exit $EXIT_FAILURE - fi - - # Check to see that this really is a libtool archive. 
- if (${SED} -e '2q' $lib | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : - else - $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 - exit $EXIT_FAILURE - fi - - ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'` - test "X$ladir" = "X$lib" && ladir="." - - dlname= - dlopen= - dlpreopen= - libdir= - library_names= - old_library= - # If the library was installed with an old release of libtool, - # it will not redefine variables installed, or shouldnotlink - installed=yes - shouldnotlink=no - avoidtemprpath= - - - # Read the .la file - case $lib in - */* | *\\*) . $lib ;; - *) . ./$lib ;; - esac - - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan" || - { test "$linkmode" != prog && test "$linkmode" != lib; }; then - test -n "$dlopen" && dlfiles="$dlfiles $dlopen" - test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" - fi - - if test "$pass" = conv; then - # Only check for convenience libraries - deplibs="$lib $deplibs" - if test -z "$libdir"; then - if test -z "$old_library"; then - $echo "$modename: cannot find name of link library for \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - # It is a libtool convenience library, so add in its objects. - convenience="$convenience $ladir/$objdir/$old_library" - old_convenience="$old_convenience $ladir/$objdir/$old_library" - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" - if test "X$duplicate_deps" = "Xyes" ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done - elif test "$linkmode" != prog && test "$linkmode" != lib; then - $echo "$modename: \`$lib' is not a convenience library" 1>&2 - exit $EXIT_FAILURE - fi - continue - fi # $pass = conv - - - # Get the name of the library we link against. - linklib= - for l in $old_library $library_names; do - linklib="$l" - done - if test -z "$linklib"; then - $echo "$modename: cannot find name of link library for \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - - # This library was specified with -dlopen. - if test "$pass" = dlopen; then - if test -z "$libdir"; then - $echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - if test -z "$dlname" || - test "$dlopen_support" != yes || - test "$build_libtool_libs" = no; then - # If there is no dlname, no dlopen support or we're linking - # statically, we need to preload. We also need to preload any - # dependent libraries so libltdl's deplib preloader doesn't - # bomb out in the load deplibs phase. - dlprefiles="$dlprefiles $lib $dependency_libs" - else - newdlfiles="$newdlfiles $lib" - fi - continue - fi # $pass = dlopen - - # We need an absolute path. - case $ladir in - [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; - *) - abs_ladir=`cd "$ladir" && pwd` - if test -z "$abs_ladir"; then - $echo "$modename: warning: cannot determine absolute directory name of \`$ladir'" 1>&2 - $echo "$modename: passing it literally to the linker, although it might fail" 1>&2 - abs_ladir="$ladir" - fi - ;; - esac - laname=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` - - # Find the relevant object directory and library name. - if test "X$installed" = Xyes; then - if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then - $echo "$modename: warning: library \`$lib' was moved." 1>&2 - dir="$ladir" - absdir="$abs_ladir" - libdir="$abs_ladir" - else - dir="$libdir" - absdir="$libdir" - fi - test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes - else - if test ! 
-f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then - dir="$ladir" - absdir="$abs_ladir" - # Remove this search path later - notinst_path="$notinst_path $abs_ladir" - else - dir="$ladir/$objdir" - absdir="$abs_ladir/$objdir" - # Remove this search path later - notinst_path="$notinst_path $abs_ladir" - fi - fi # $installed = yes - name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'` - - # This library was specified with -dlpreopen. - if test "$pass" = dlpreopen; then - if test -z "$libdir"; then - $echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - # Prefer using a static library (so that no silly _DYNAMIC symbols - # are required to link). - if test -n "$old_library"; then - newdlprefiles="$newdlprefiles $dir/$old_library" - # Otherwise, use the dlname, so that lt_dlopen finds it. - elif test -n "$dlname"; then - newdlprefiles="$newdlprefiles $dir/$dlname" - else - newdlprefiles="$newdlprefiles $dir/$linklib" - fi - fi # $pass = dlpreopen - - if test -z "$libdir"; then - # Link the convenience library - if test "$linkmode" = lib; then - deplibs="$dir/$old_library $deplibs" - elif test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$dir/$old_library $compile_deplibs" - finalize_deplibs="$dir/$old_library $finalize_deplibs" - else - deplibs="$lib $deplibs" # used for prog,scan pass - fi - continue - fi - - - if test "$linkmode" = prog && test "$pass" != link; then - newlib_search_path="$newlib_search_path $ladir" - deplibs="$lib $deplibs" - - linkalldeplibs=no - if test "$link_all_deplibs" != no || test -z "$library_names" || - test "$build_libtool_libs" = no; then - linkalldeplibs=yes - fi - - tmp_libs= - for deplib in $dependency_libs; do - case $deplib in - -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test - esac - # Need to link against all dependency_libs? - if test "$linkalldeplibs" = yes; then - deplibs="$deplib $deplibs" - else - # Need to hardcode shared library paths - # or/and link against static libraries - newdependency_libs="$deplib $newdependency_libs" - fi - if test "X$duplicate_deps" = "Xyes" ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done # for deplib - continue - fi # $linkmode = prog... - - if test "$linkmode,$pass" = "prog,link"; then - if test -n "$library_names" && - { test "$prefer_static_libs" = no || test -z "$old_library"; }; then - # We need to hardcode the library path - if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then - # Make sure the rpath contains only unique directories. - case "$temp_rpath " in - *" $dir "*) ;; - *" $absdir "*) ;; - *) temp_rpath="$temp_rpath $absdir" ;; - esac - fi - - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. - case " $sys_lib_dlsearch_path " in - *" $absdir "*) ;; - *) - case "$compile_rpath " in - *" $absdir "*) ;; - *) compile_rpath="$compile_rpath $absdir" - esac - ;; - esac - case " $sys_lib_dlsearch_path " in - *" $libdir "*) ;; - *) - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" - esac - ;; - esac - fi # $linkmode,$pass = prog,link... 
- - if test "$alldeplibs" = yes && - { test "$deplibs_check_method" = pass_all || - { test "$build_libtool_libs" = yes && - test -n "$library_names"; }; }; then - # We only need to search for static libraries - continue - fi - fi - - link_static=no # Whether the deplib will be linked statically - use_static_libs=$prefer_static_libs - if test "$use_static_libs" = built && test "$installed" = yes ; then - use_static_libs=no - fi - if test -n "$library_names" && - { test "$use_static_libs" = no || test -z "$old_library"; }; then - if test "$installed" = no; then - notinst_deplibs="$notinst_deplibs $lib" - need_relink=yes - fi - # This is a shared library - - # Warn about portability, can't link against -module's on - # some systems (darwin) - if test "$shouldnotlink" = yes && test "$pass" = link ; then - $echo - if test "$linkmode" = prog; then - $echo "*** Warning: Linking the executable $output against the loadable module" - else - $echo "*** Warning: Linking the shared library $output against the loadable module" - fi - $echo "*** $linklib is not portable!" - fi - if test "$linkmode" = lib && - test "$hardcode_into_libs" = yes; then - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. - case " $sys_lib_dlsearch_path " in - *" $absdir "*) ;; - *) - case "$compile_rpath " in - *" $absdir "*) ;; - *) compile_rpath="$compile_rpath $absdir" - esac - ;; - esac - case " $sys_lib_dlsearch_path " in - *" $libdir "*) ;; - *) - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" - esac - ;; - esac - fi - - if test -n "$old_archive_from_expsyms_cmds"; then - # figure out the soname - set dummy $library_names - realname="$2" - shift; shift - libname=`eval \\$echo \"$libname_spec\"` - # use dlname if we got it. it's perfectly good, no? - if test -n "$dlname"; then - soname="$dlname" - elif test -n "$soname_spec"; then - # bleh windows - case $host in - *cygwin* | mingw*) - major=`expr $current - $age` - versuffix="-$major" - ;; - esac - eval soname=\"$soname_spec\" - else - soname="$realname" - fi - - # Make a new name for the extract_expsyms_cmds to use - soroot="$soname" - soname=`$echo $soroot | ${SED} -e 's/^.*\///'` - newlib="libimp-`$echo $soname | ${SED} 's/^lib//;s/\.dll$//'`.a" - - # If the library has no export list, then create one now - if test -f "$output_objdir/$soname-def"; then : - else - $show "extracting exported symbol list from \`$soname'" - save_ifs="$IFS"; IFS='~' - cmds=$extract_expsyms_cmds - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? - done - IFS="$save_ifs" - fi - - # Create $newlib - if test -f "$output_objdir/$newlib"; then :; else - $show "generating import library for \`$soname'" - save_ifs="$IFS"; IFS='~' - cmds=$old_archive_from_expsyms_cmds - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? 
- done - IFS="$save_ifs" - fi - # make sure the library variables are pointing to the new library - dir=$output_objdir - linklib=$newlib - fi # test -n "$old_archive_from_expsyms_cmds" - - if test "$linkmode" = prog || test "$mode" != relink; then - add_shlibpath= - add_dir= - add= - lib_linked=yes - case $hardcode_action in - immediate | unsupported) - if test "$hardcode_direct" = no; then - add="$dir/$linklib" - case $host in - *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; - *-*-sysv4*uw2*) add_dir="-L$dir" ;; - *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ - *-*-unixware7*) add_dir="-L$dir" ;; - *-*-darwin* ) - # if the lib is a module then we can not link against - # it, someone is ignoring the new warnings I added - if /usr/bin/file -L $add 2> /dev/null | - $EGREP ": [^:]* bundle" >/dev/null ; then - $echo "** Warning, lib $linklib is a module, not a shared library" - if test -z "$old_library" ; then - $echo - $echo "** And there doesn't seem to be a static archive available" - $echo "** The link will probably fail, sorry" - else - add="$dir/$old_library" - fi - fi - esac - elif test "$hardcode_minus_L" = no; then - case $host in - *-*-sunos*) add_shlibpath="$dir" ;; - esac - add_dir="-L$dir" - add="-l$name" - elif test "$hardcode_shlibpath_var" = no; then - add_shlibpath="$dir" - add="-l$name" - else - lib_linked=no - fi - ;; - relink) - if test "$hardcode_direct" = yes; then - add="$dir/$linklib" - elif test "$hardcode_minus_L" = yes; then - add_dir="-L$dir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) - add_dir="$add_dir -L$inst_prefix_dir$libdir" - ;; - esac - fi - add="-l$name" - elif test "$hardcode_shlibpath_var" = yes; then - add_shlibpath="$dir" - add="-l$name" - else - lib_linked=no - fi - ;; - *) lib_linked=no ;; - esac - - if test "$lib_linked" != yes; then - $echo "$modename: configuration error: unsupported hardcode properties" - exit $EXIT_FAILURE - fi - - if test -n "$add_shlibpath"; then - case :$compile_shlibpath: in - *":$add_shlibpath:"*) ;; - *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; - esac - fi - if test "$linkmode" = prog; then - test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" - test -n "$add" && compile_deplibs="$add $compile_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" - if test "$hardcode_direct" != yes && \ - test "$hardcode_minus_L" != yes && \ - test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; - esac - fi - fi - fi - - if test "$linkmode" = prog || test "$mode" = relink; then - add_shlibpath= - add_dir= - add= - # Finalize command for both is simple: just hardcode it. - if test "$hardcode_direct" = yes; then - add="$libdir/$linklib" - elif test "$hardcode_minus_L" = yes; then - add_dir="-L$libdir" - add="-l$name" - elif test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; - esac - add="-l$name" - elif test "$hardcode_automatic" = yes; then - if test -n "$inst_prefix_dir" && - test -f "$inst_prefix_dir$libdir/$linklib" ; then - add="$inst_prefix_dir$libdir/$linklib" - else - add="$libdir/$linklib" - fi - else - # We cannot seem to hardcode it, guess we'll fake it. - add_dir="-L$libdir" - # Try looking first in the location we're being installed to. 
- if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) - add_dir="$add_dir -L$inst_prefix_dir$libdir" - ;; - esac - fi - add="-l$name" - fi - - if test "$linkmode" = prog; then - test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" - test -n "$add" && finalize_deplibs="$add $finalize_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" - fi - fi - elif test "$linkmode" = prog; then - # Here we assume that one of hardcode_direct or hardcode_minus_L - # is not unsupported. This is valid on all known static and - # shared platforms. - if test "$hardcode_direct" != unsupported; then - test -n "$old_library" && linklib="$old_library" - compile_deplibs="$dir/$linklib $compile_deplibs" - finalize_deplibs="$dir/$linklib $finalize_deplibs" - else - compile_deplibs="-l$name -L$dir $compile_deplibs" - finalize_deplibs="-l$name -L$dir $finalize_deplibs" - fi - elif test "$build_libtool_libs" = yes; then - # Not a shared library - if test "$deplibs_check_method" != pass_all; then - # We're trying link a shared library against a static one - # but the system doesn't support it. - diff --git a/tests/examplefiles/main.cmake b/tests/examplefiles/main.cmake deleted file mode 100644 index 6dfcab10..00000000 --- a/tests/examplefiles/main.cmake +++ /dev/null @@ -1,45 +0,0 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.6 FATAL_ERROR) - -SET( SOURCES back.c io.c main.c ) -SET( PATH $ENV{PATH} ) -MESSAGE( ${SOURCES} ) # three arguments, prints "back.cio.cmain.c" -MESSAGE( "${SOURCES}" ) # one argument, prints "back.c;io.c;main.c" -MESSAGE( "" ) # one argument, prints "" an empty line -MESSAGE( "${EMPTY_STRING}" ) # one argument, prints "" an empty line -MESSAGE( ${EMPTY_STRING} ) # zero arguments, causes CMake Error - # "MESSAGE called with incorrect number of arguments" -MESSAGE( \\\"\ \(\)\#\$\^ ) # this message contains literal characters - -MESSAGE( "This is practice." ) # prints "This is practice." -MESSAGE( "This;is;practice." ) # prints "This;is;practice." -MESSAGE( "Hi. ) MESSAGE( x )" ) # prints "Hi. ) MESSAGE( x )" - -MESSAGE( "Welc"ome ) # rule 1 -MESSAGE( Welc"ome" ) # rule 3 -MESSAGE( Welc"ome)" ) # rule 2 -MESSAGE( ""Thanks ) # rule 1 -MESSAGE( Thanks"" ) # rule 3 - -SET( x y A B C ) # stores "y;A;B;C" in x (without quote) -SET( ${x} ) # => SET( y;A;B;C ) => SET( y A B C) -MESSAGE( ${y} ) # prints "ABC" to stdout (without quotes) -SET( y x ) # stores "x" in y (without quotes) -SET( ${y} y = x ) # => SET( x y ) -MESSAGE( "\${x} = '${x}'" ) # prints "${x} = 'y;=;x'" to stdout (without quotes) -SET( y ${x} ) # => SET( y y = x ) => stores "y;=;x" in y (without quotes) -MESSAGE( ${y} ) # prints "y=x" to stdout (without quotes) - -SET( x a b c ) # stores "a;b;c" in x (without quotes) -SET( y "a b c" ) # stores "a b c" in y (without quotes) -MESSAGE( a b c ) # prints "abc" to stdout (without quotes) -MESSAGE( ${x} ) # prints "abc" to stdout (without quotes) -MESSAGE("${x}") # prints "a;b;c" to stdout (without quotes) -MESSAGE( ${y} ) # prints "a b c" to stdout (without quotes) -MESSAGE("${y}") # prints "a b c" to stdout (without quotes) - -# This is a comment. 
-COMMAND( arguments go here ) -ANOTHER_COMMAND() # this command has no arguments -YET_ANOTHER_COMMAND( these - arguments are spread # another comment - over several lines ) diff --git a/tests/examplefiles/markdown.lsp b/tests/examplefiles/markdown.lsp deleted file mode 100755 index 8159082b..00000000 --- a/tests/examplefiles/markdown.lsp +++ /dev/null @@ -1,679 +0,0 @@ -#!/usr/bin/env newlisp - -;; @module markdown -;; @author cormullion -;; @description a port of John Gruber's Markdown to newLISP -;; @location http://unbalanced-parentheses.nfshost.com/ -;; @version of date 2011-10-02 22:36:02 -;; version history: at the end -;; a port of John Gruber's Markdown.pl (http://daringfireball.net/markdown) script to newLISP... -;; see his original Perl script for explanations of the fearsome regexen and -;; byzantine logic, etc... -;; TODO: -;; the following Markdown tests fail: -;; Inline HTML (Advanced) ... FAILED -;; Links, reference style ... FAILED -- nested brackets -;; Links, shortcut references ... FAILED -;; Markdown Documentation - Syntax ... FAILED -;; Ordered and unordered lists ... FAILED -- a nested ordered list error -;; parens in url : ![this is a stupid URL](http://example.com/(parens).jpg) see (Images.text) -;; Add: email address scrambling - -(context 'Hash) -(define HashTable:HashTable) - -(define (build-escape-table) - (set '*escape-chars* [text]\`*_{}[]()>#+-.![/text]) - (dolist (c (explode *escape-chars*)) - (HashTable c (hash c)))) - -(define (init-hash txt) - ; finds a hash identifier that doesn't occur anywhere in the text - (set 'counter 0) - (set 'hash-prefix "HASH") - (set 'hash-id (string hash-prefix counter)) - (do-while (find hash-id txt) - (set 'hash-id (string hash-prefix (inc counter)))) - (Hash:build-escape-table)) - -(define (hash s) - (HashTable s (string hash-id (inc counter)))) - -(context 'markdown) - -(define (markdown:markdown txt) - (initialize) - (Hash:init-hash txt) - (unescape-special-chars - (block-transforms - (strip-link-definitions - (protect - (cleanup txt)))))) - -(define (initialize) - (set '*escape-pairs* '( - ({\\\\} {\}) - ({\\`} {`}) - ({\\\*} {*}) - ({\\_} {_}) - ([text]\\\{[/text] [text]{[/text]) - ([text]\\\}[/text] [text]}[/text]) - ({\\\[} {[}) - ({\\\]} {]}) - ({\\\(} {(}) - ({\\\)} {)}) - ({\\>} {>}) - ({\\\#} {#}) - ({\\\+} {+}) - ({\\\-} {-}) - ({\\\.} {.}) - ({\\!} {!}))) - (set '*hashed-html-blocks* '()) - (set '*list-level* 0)) - -(define (block-transforms txt) - (form-paragraphs - (protect - (block-quotes - (code-blocks - (lists - (horizontal-rules - (headers txt)))))))) - -(define (span-transforms txt) - (line-breaks - (emphasis - (amps-and-angles - (auto-links - (anchors - (images - (escape-special-chars - (escape-special-chars (code-spans txt) 'inside-attributes))))))))) - -(define (tokenize-html xhtml) -; return list of tag/text portions of xhtml text - (letn ( - (tag-match [text]((?s:)| -(?s:<\?.*?\?>)| -(?:<[a-z/!$](?:[^<>]| -(?:<[a-z/!$](?:[^<>]| -(?:<[a-z/!$](?:[^<>]| -(?:<[a-z/!$](?:[^<>]| -(?:<[a-z/!$](?:[^<>]| -(?:<[a-z/!$](?:[^<>])*>))*>))*>))*>))*>))*>))[/text]) ; yeah, well... 
- (str xhtml) - (len (length str)) - (pos 0) - (tokens '())) - (while (set 'tag-start (find tag-match str 8)) - (if (< pos tag-start) - (push (list 'text (slice str pos (- tag-start pos))) tokens -1)) - (push (list 'tag $0) tokens -1) - (set 'str (slice str (+ tag-start (length $0)))) - (set 'pos 0)) - ; leftovers - (if (< pos len) - (push (list 'text (slice str pos (- len pos))) tokens -1)) - tokens)) - -(define (escape-special-chars txt (within-tag-attributes nil)) - (let ((temp (tokenize-html txt)) - (new-text {})) - (dolist (pair temp) - (if (= (first pair) 'tag) - ; 'tag - (begin - (set 'new-text (replace {\\} (last pair) (HashTable {\\}) 0)) - (replace [text](?<=.)(?=.)[/text] new-text (HashTable {`}) 0) - (replace {\*} new-text (HashTable {*}) 0) - (replace {_} new-text (HashTable {_} ) 0)) - ; 'text - (if within-tag-attributes - (set 'new-text (last pair)) - (set 'new-text (encode-backslash-escapes (last pair))))) - (setf (temp $idx) (list (first pair) new-text))) - ; return as text - (join (map last temp)))) - -(define (encode-backslash-escapes t) - (dolist (pair *escape-pairs*) - (replace (first pair) t (HashTable (last pair)) 14))) - -(define (encode-code s) - ; encode/escape certain characters inside Markdown code runs - (replace {&} s "&" 0) - (replace {<} s "<" 0) - (replace {>} s ">" 0) - (replace {\*} s (HashTable {\\}) 0) - (replace {_} s (HashTable {_}) 0) - (replace "{" s (HashTable "{") 0) - (replace {\[} s (HashTable {[}) 0) - (replace {\]} s (HashTable {]}) 0) - (replace {\\} s (HashTable "\\") 0)) - -(define (code-spans s) - (replace - {(?} (encode-code (trim $2)) {
    }) - 2)) - -(define (encode-alt s) - (replace {&} s "&" 0) - (replace {"} s """ 0)) - -(define (images txt) - (let ((alt-text {}) - (url {}) - (title {}) - (ref-regex {(!\[(.*?)\][ ]?(?:\n[ ]*)?\[(.*?)\])}) - (inline-regex {(!\[(.*?)\]\([ \t]*?[ \t]*((['"])(.*?)\5[ \t]*)?\))}) - (whole-match {}) - (result {}) - (id-ref {}) - (url {})) - ; reference links ![alt text][id] - (replace - ref-regex - txt - (begin - (set 'whole-match $1 'alt-text $2 'id-ref $3) - (if alt-text - (replace {"} alt-text {"} 0)) - (if (empty? id-ref) - (set 'id-ref (lower-case alt-text))) - (if (lookup id-ref *link-database*) - (set 'url (first (lookup id-ref *link-database*))) - (set 'url nil)) - (if url - (begin - (replace {\*} url (HashTable {*}) 0) - (replace {_} url (HashTable {_}) 0) - )) - (if (last (lookup id-ref *link-database*)) - ; title - (begin - (set 'title (last (lookup id-ref *link-database*))) - (replace {"} title {"} 0) - (replace {\*} title (HashTable {*}) 0) - (replace {_} title (HashTable {_}) 0)) - ; no title - (set 'title {}) - ) - (if url - (set 'result (string - {} 
-          alt-text {})) - (set 'result whole-match)) - ) - 0 - ) - ; inline image refs: ![alt text](url "optional title") - (replace - inline-regex - txt - (begin - (set 'whole-match $1) - (set 'alt-text $2) - (set 'url $3) - (set 'title $6) - (if alt-text - (replace {"} alt-text {"} 0) - (set 'alt-text {})) - (if title - (begin - (replace {"} title {"} 0) - (replace {\*} title (HashTable {*}) 0) - (replace {_} title (HashTable {_}) 0)) - (set 'title {})) - (replace {\*} url (HashTable {*}) 0) - (replace {_} url (HashTable {_}) 0) - (string - {} 
-           alt-text {}) - ) - 0 - ) - ; empty ones are possible - (set '$1 {}) - (replace {!\[(.*?)\]\([ \t]*\)} - txt - (string {} $1 {}) - 0))) - -(define (make-anchor link-text id-ref ) -; Link defs are in the form: ^[id]: url "optional title" -; stored in link db list as (id (url title)) -; params are text to be linked and the id of the link in the db -; eg bar 1 for [bar][1] - - (let ((title {}) - (id id-ref) - (url nil)) - (if link-text - (begin - (replace {"} link-text {"} 0) - (replace {\n} link-text { } 0) - (replace {[ ]?\n} link-text { } 0))) - (if (null? id ) (set 'id (lower-case link-text))) - (if (not (nil? (lookup id *link-database*))) - (begin - (set 'url (first (lookup id *link-database*))) - (replace {\*} url (HashTable {*}) 0) - (replace {_} url (HashTable {_}) 0) - (if (set 'title (last (lookup id *link-database*))) - (begin - (replace {"} title {"} 0) - (replace {\*} title (HashTable {*}) 0) - (replace {_} title (HashTable {_}) 0)) - (set 'title {}))) - (set 'url nil)) - (if url - (string {} link-text {}) - (string {[} link-text {][} id-ref {]})))) - -(define (anchors txt) - (letn ((nested-brackets {(?>[^\[\]]+)*}) - (ref-link-regex (string {(\[(} nested-brackets {)\][ ]?(?:\n[ ]*)?\[(.*?)\])})) - (inline-regex {(\[(.*?)\]\([ ]*?[ ]*((['"])(.*?)\5[ \t]*)?\))}) - (link-text {}) - (url {}) - (title {})) - ; reference-style links: [link text] [id] - (set '$1 {} '$2 {} '$3 {} '$4 {} '$5 {} '$6 {}) ; i still don't think I should have to do this... - - ; what about this regex instead? - (set 'ref-link-regex {(\[(.*?)\][ ]?\[(.*?)\])}) - - (replace ref-link-regex txt (make-anchor $2 $3) 8) ; $2 is link text, $3 is id - ; inline links: [link text](url "optional title") - (set '$1 {} '$2 {} '$3 {} '$4 {} '$5 {} '$6 {}) - (replace - inline-regex - txt - (begin - (set 'link-text $2) - (set 'url $3) - (set 'title $6) - (if link-text (replace {"} link-text {"} 0)) - (if title - (begin - (replace {"} title {"} 0) - (replace {\*} title (HashTable {*}) 0) - (replace {_} title (HashTable {_}) 0)) - (set 'title {})) - (replace {\*} url (HashTable {*}) 0) - (replace {_} url (HashTable {_}) 0) - (replace {^<(.*)>$} url $1 0) - (string - {} link-text {} - )) - 8 - ) ; replace - ) txt) - -(define (auto-links txt) - (replace - [text]<((https?|ftp):[^'">\s]+)>[/text] - txt - (string {} $1 {}) - 0 - ) - ; to-do: email ... -) - -(define (amps-and-angles txt) -; Smart processing for ampersands and angle brackets - (replace - [text]&(?!\#?[xX]?(?:[0-9a-fA-F]+|\w+);)[/text] - txt - {&} - 10 - ) - (replace - [text]<(?![a-z/?\$!])[/text] - txt - {<} - 10)) - -(define (emphasis txt) - ; italics/bold: strong first - (replace - [text] (\*\*|__) (?=\S) (.+?[*_]*) (?<=\S) \1 [/text] - txt - (string {} $2 {}) - 8 - ) - (replace - [text] (\*|_) (?=\S) (.+?) (?<=\S) \1 [/text] - txt - (string {} $2 {}) - 8 - )) - -(define (line-breaks txt) - ; handles line break markers - (replace " {2,}\n" txt "
    \n" 0)) - -(define (hex-str-to-unicode-char strng) - ; given a five character string, assume it's "U" + 4 hex chars and convert - ; return the character... - (char (int (string "0x" (1 strng)) 0 16))) - -(define (ustring s) - ; any four digit string preceded by U - (replace "U[0-9a-f]{4,}" s (hex-str-to-unicode-char $0) 0)) - -(define (cleanup txt) - ; cleanup the text by normalizing some possible variations - (replace "\r\n|\r" txt "\n" 0) ; standardize line ends - (push "\n\n" txt -1) ; end with two returns - (set 'txt (detab txt)) ; convert tabs to spaces - - ; convert inline Unicode: - (set 'txt (ustring txt)) - (replace "\n[ \t]+\n" txt "\n\n" 0) ; lines with only spaces and tabs - ) - -(define (protect txt) - ; protect or "hash html blocks" - (letn ((nested-block-regex [text](^<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\b(.*\n)*?[ \t]*(?=\n+|\Z))[/text]) - (liberal-tag-regex [text](^<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math)\b(.*\n)*?.*[ \t]*(?=\n+|\Z))[/text]) - (hr-regex [text](?:(?<=\n\n)|\A\n?)([ ]{0,3}<(hr)\b([^<>])*?/?>[ \t]*(?=\n{2,}|\Z))[/text]) - (html-comment-regex [text](?:(?<=\n\n)|\A\n?)([ ]{0,3}(?s:)[ \t]*(?=\n{2,}|\Z))[/text]) - (results '()) - (chunk-count (length (set 'chunks (parse txt "\n\n")))) - (chunk-size 500)) - - ; due to a limitation in PCRE, long sections have to be divided up otherwise we'll crash - ; so divide up long texts into chunks, then do the regex on each chunk - ; not an ideal solution, but it works ok :( - - (for (i 0 chunk-count chunk-size) - ; do a chunk - (set 'text-chunk (join (i (- (min chunk-count (- (+ i chunk-size) 1)) i) chunks) "\n\n")) - (dolist (rgx (list nested-block-regex liberal-tag-regex hr-regex html-comment-regex)) - (replace - rgx - text-chunk - (begin - (set 'key (Hash:hash $1)) - (push (list key $1 ) *hashed-html-blocks* -1) - (string "\n\n" key "\n\n")) - 2)) - ; save this partial result - (push text-chunk results -1) - ) ; for - ; return string result - (join results "\n\n"))) - -(define (unescape-special-chars t) - ; Swap back in all the special characters we've hidden. - (dolist (pair (HashTable)) - (replace (last pair) t (first pair) 10)) t) - -(define (strip-link-definitions txt) - ; strip link definitions from the text and store them - ; Link defs are in the form: ^[id]: url "optional title" - ; stored in link db list as (id (url title)) - (let ((link-db '()) - (url {}) - (id {}) - (title {})) - (replace - [text]^[ ]{0,3}\[(.+)\]:[ \t]*\n?[ \t]*?[ \t]*\n?[ \t]*(?:(?<=\s)["(](.+?)[")][ \t]*)?(?:\n+|\Z)[/text] - txt - (begin - (set 'id (lower-case $1) 'url (amps-and-angles $2) 'title $3) - (if title (replace {"} title {"} 0)) - (push (list id (list url title)) link-db) - (set '$3 {}) ; necessary? - (string {}) ; remove from text - ) - 10) - (set '*link-database* link-db) - txt)) - -(define (horizontal-rules txt) - (replace - [text]^[ ]{0,2}([ ]?\*[ ]?){3,}[ \t]*$[/text] - txt - "\n
    " - 14) - (replace - [text]^[ ]{0,2}([ ]? -[ ]?){3,}[ \t]*$[/text] - txt - "\n
    " - 14) - (replace - [text]^[ ]{0,2}([ ]? _[ ]?){3,}[ \t]*$[/text] - txt - "\n
    " - 14)) - -(define (headers txt) - ; setext headers - (let ((level 1)) - (replace - [text]^(.+)[ \t]*\n=+[ \t]*\n+[/text] - txt - (string "
<h1>" (span-transforms $1) "</h1>\n\n") - 2) - - (replace - [text]^(.+)[ \t]*\n-+[ \t]*\n+[/text] - txt - (string "<h2>" (span-transforms $1) "</h2>
\n\n") - 2) - ; atx headers - (replace - [text]^(\#{1,6})\s*(.+?)[ ]*\#*(\n+)[/text] - txt - (begin - (set 'level (length $1)) - (string "<h" level ">" (span-transforms $2) "</h" level ">\n\n") - ) - 2))) - (define (lists txt) - (letn ((marker-ul {[*+-]}) - (marker-ol {\d+[.]}) - (marker-any (string {(?:} marker-ul {|} marker-ol {)})) - (whole-list-regex (string [text](([ ]{0,3}([/text] marker-any [text])[ \t]+)(?s:.+?)(\z|\n{2,}(?=\S)(?![ \t]*[/text] marker-any [text][ \t]+)))[/text])) - (my-list {}) - (list-type {}) - (my-result {})) - (replace - (if (> *list-level* 0) - (string {^} whole-list-regex) - (string {(?:(?<=\n\n)|\A\n?)} whole-list-regex)) - txt - (begin - (set 'my-list $1) - (if (find $3 marker-ul) - (set 'list-type "ul" 'marker-type marker-ul) - (set 'list-type "ol" 'marker-type marker-ol)) - (replace [text]\n{2,}[/text] my-list "\n\n\n" 0) - (set 'my-result (process-list-items my-list marker-any)) - (replace {\s+$} my-result {} 0) - (string {<} list-type {>} "\n" my-result "\n" {</} list-type {>} "\n")) - 10 ; must be multiline - ))) - (define (process-list-items list-text marker-any) - (let ((list-regex (string [text](\n)?(^[ \t]*)([/text] marker-any [text])[ \t]+((?s:.+?)(\n{1,2}))(?=\n*(\z|\2([/text] marker-any [text])[ \t]+))[/text])) - (item {}) - (leading-line {}) - (leading-space {}) - (result {})) - (inc *list-level*) - (replace [text]\n{2,}\z[/text] list-text "\n" 0) - (set '$1 {} '$2 {} '$3 {} '$4 {} '$5 {}) - (replace - list-regex - list-text - (begin - (set 'item $4) - (set 'leading-line $1) - (set 'leading-space $2) - (if (or (not (empty? leading-line)) (ends-with item "\n{2,}" 0)) - (set 'item (block-transforms (outdent item))) - ; recurse for sub lists - (begin - (set 'item (lists (outdent item))) - (set 'item (span-transforms (trim item "\n"))) - )) - (string {
<li>} item {</li>} "\n")) - 10) - (dec *list-level*) - list-text)) - (define (code-blocks txt) - (let ((code-block {}) - (token-list '())) - (replace - [text](?:\n\n|\A)((?:(?:[ ]{4}|\t).*\n+)+)((?=^[ ]{0,3}\S)|\Z)[/text] - txt - (begin - (set 'code-block $1) - ; format if Nestor module is loaded and it's not marked as plain - (if (and (not (starts-with code-block " ;plain\n")) (context? Nestor)) - ; format newlisp - (begin - ; remove flag if present - (replace "[ ]{4};newlisp\n" code-block {} 0) - (set 'code-block (protect (Nestor:nlx-to-html (Nestor:my-read (trim (detab (outdent code-block)) "\n"))))) - code-block) - ; don't format - (begin - ; trim leading and trailing newlines - (replace "[ ]{4};plain\n" code-block {} 0) - (set 'code-block (trim (detab (encode-code (outdent code-block))) "\n")) - (set '$1 {}) - (set 'code-block (string "\n\n<pre><code>" code-block "\n</code></pre>
    \n\n"))))) - 10))) - -(define (block-quotes txt) - (let ((block-quote {})) - (replace - [text]((^[ \t]*>[ \t]?.+\n(.+\n)*\n*)+)[/text] - txt - (begin - (set 'block-quote $1) - (replace {^[ ]*>[ ]?} block-quote {} 2) - (replace {^[ ]+$} block-quote {} 2) - (set 'block-quote (block-transforms block-quote)) ; recurse - ; remove leading spaces - (replace - {(\s*
    <pre>.+?</pre>)} - block-quote - (trim $1) - 2) - (string "<blockquote>\n" block-quote "\n</blockquote>
    \n\n")) - 2))) - -(define (outdent s) - (replace [text]^(\t|[ ]{1,4})[/text] s {} 2)) - -(define (detab s) - (replace [text](.*?)\t[/text] - s - (string $1 (dup { } (- 4 (% (length $1) 4)))) - 2)) - -(define (form-paragraphs txt) - (let ((grafs '()) - (original nil)) - (set 'txt (trim txt "\n")) ; strip blank lines before and after - (set 'grafs (parse txt "\n{2,}" 0)) ; split - (dolist (p grafs) - (if (set 'original (lookup p *hashed-html-blocks*)) - ; html blocks - (setf (grafs $idx) original) - ; wrap
    <p> tags round everything else - (setf (grafs $idx) (string {<p>} (replace {^[ ]*} (span-transforms p) {} (+ 4 8 16)) {</p>
    })))) - (join grafs "\n\n"))) - -[text] -; three command line arguments: let's hope last one is a file -(when (= 3 (length (main-args))) - (println (markdown (read-file (main-args 2)))) - (exit)) - -; hack for command-line and module loading -(set 'level (sys-info 3)) - -; if level is 2, then we're probably invoking markdown.lsp directly -; if level is > 3, then we're probably loading it into another script... - -(when (= level 2) - ; running on command line, read STDIN and execute: - (while (read-line) - (push (current-line) *stdin* -1)) - (println (markdown (join *stdin* "\n"))) - (exit)) -[/text] - -;; version 2011-09-16 16:31:29 -;; Changed to different hash routine. Profiling shows that hashing takes 40% of the execution time. -;; Unfortunately this new version is only very slightly faster. -;; Command-line arguments hack in previous version doesn't work. -;; -;; version 2011-08-18 15:04:40 -;; various fixes, and added hack for running this from the command-line: -;; echo "hi there" | newlisp markdown.lsp -;; echo "hello world" | markdown.lsp -;; cat file.text | newlisp markdown.lsp -;; -;; version 2010-11-14 17:34:52 -;; some problems in ustring. Probably remove it one day, as it's non standard... -;; -;; version 2010-10-14 18:41:38 -;; added code to work round PCRE crash in (protect ... -;; -;; version date 2010-07-10 22:20:25 -;; modified call to 'read' since lutz has changed it -;; -;; version date 2009-11-16 22:10:10 -;; fixed bug in tokenize.html -;; -;; version date 2008-10-08 18:44:46 -;; changed nth-set to setf to be version-10 ready. -;; This means that now this script will NOT work with -;; earlier versions of newLISP!!!!!!!!!!! -;; requires Nestor if you want source code colouring... -;; -;; version date 2008-08-08 16:54:56 -;; changed (unless to (if (not ... :( -;; -;; version date 2008-07-20 14:!2:29 -;; added hex-str-to-unicode-char ustring -;; -;; version date 2008-03-07 15:36:09 -;; fixed load error -;; -;; version date 2007-11-17 16:20:57 -;; added syntax colouring module -;; -;; version date 2007-11-14 09:19:42 -;; removed reliance on dostring for compatibility with 9.1 - - -; eof \ No newline at end of file diff --git a/tests/examplefiles/matlab_noreturn b/tests/examplefiles/matlab_noreturn deleted file mode 100644 index 78027827..00000000 --- a/tests/examplefiles/matlab_noreturn +++ /dev/null @@ -1,3 +0,0 @@ - function myfunc(s) - a = 1; - end diff --git a/tests/examplefiles/matlab_sample b/tests/examplefiles/matlab_sample deleted file mode 100644 index bb00b517..00000000 --- a/tests/examplefiles/matlab_sample +++ /dev/null @@ -1,34 +0,0 @@ -function zz=sample(aa) -%%%%%%%%%%%%%%%%%% -% some comments -%%%%%%%%%%%%%%%%%% - -x = 'a string'; % some 'ticks' in a comment -y = 'a string with ''interal'' quotes'; - -for i=1:20 - disp(i); -end - -a = rand(30); -b = rand(30); - -c = a .* b ./ a \ ... comment at end of line and continuation - (b .* a + b - a); - -c = a' * b'; % note: these ticks are for transpose, not quotes. 
- -disp('a comment symbol, %, in a string'); - -!echo abc % this isn't a comment - it's passed to system command - -function y=myfunc(x) -y = exp(x); - - {% -a block comment - %} - -function no_arg_func -fprintf('%s\n', 'function with no args') -end diff --git a/tests/examplefiles/matlabsession_sample.txt b/tests/examplefiles/matlabsession_sample.txt deleted file mode 100644 index 1b33c9c4..00000000 --- a/tests/examplefiles/matlabsession_sample.txt +++ /dev/null @@ -1,37 +0,0 @@ ->> ->> ->> a = 'okay' - -a = - -okay - ->> x = rand(3) % a matrix - -x = - - 0.8147 0.9134 0.2785 - 0.9058 0.6324 0.5469 - 0.1270 0.0975 0.9575 - ->> 1/0 - -ans = - - Inf - ->> foo -??? Undefined function or variable 'foo'. - ->> ->> ->> {cos(2*pi), 'testing'} - -ans = - - [1] 'testing' - ->> ->> ->> - diff --git a/tests/examplefiles/metagrammar.treetop b/tests/examplefiles/metagrammar.treetop deleted file mode 100644 index acd6af63..00000000 --- a/tests/examplefiles/metagrammar.treetop +++ /dev/null @@ -1,455 +0,0 @@ -module Treetop - module Compiler - grammar Metagrammar - rule treetop_file - requires:(space? require_statement)* prefix:space? module_or_grammar suffix:space? { - def compile - requires.text_value + prefix.text_value + module_or_grammar.compile + suffix.text_value - end - } - end - - rule require_statement - prefix:space? "require" [ \t]+ [^\n\r]+ [\n\r] - end - - rule module_or_grammar - module_declaration / grammar - end - - rule module_declaration - prefix:('module' space name:([A-Z] alphanumeric_char* ('::' [A-Z] alphanumeric_char*)*) space) module_contents:(module_declaration / grammar) suffix:(space 'end') { - def compile - prefix.text_value + module_contents.compile + suffix.text_value - end - - def parser_name - prefix.name.text_value+'::'+module_contents.parser_name - end - } - end - - rule grammar - 'grammar' space grammar_name space ('do' space)? declaration_sequence space? 'end' - end - - rule grammar_name - ([A-Z] alphanumeric_char*) - end - - rule declaration_sequence - head:declaration tail:(space declaration)* { - def declarations - [head] + tail - end - - def tail - super.elements.map { |elt| elt.declaration } - end - } - / - '' { - def compile(builder) - end - } - end - - rule declaration - parsing_rule / include_declaration - end - - rule include_declaration - 'include' space [A-Z] (alphanumeric_char / '::')* { - def compile(builder) - builder << text_value - end - } - end - - rule parsing_rule - 'rule' space nonterminal space ('do' space)? parsing_expression space 'end' - end - - rule parsing_expression - choice / sequence / primary - end - - rule choice - head:alternative tail:(space? '/' space? 
alternative)+ { - def alternatives - [head] + tail - end - - def tail - super.elements.map {|elt| elt.alternative} - end - - def inline_modules - (alternatives.map {|alt| alt.inline_modules }).flatten - end - } - end - - rule sequence - head:labeled_sequence_primary tail:(space labeled_sequence_primary)+ node_class_declarations { - def sequence_elements - [head] + tail - end - - def tail - super.elements.map {|elt| elt.labeled_sequence_primary } - end - - def inline_modules - (sequence_elements.map {|elt| elt.inline_modules}).flatten + - [sequence_element_accessor_module] + - node_class_declarations.inline_modules - end - - def inline_module_name - node_class_declarations.inline_module_name - end - } - end - - rule alternative - sequence / primary - end - - rule primary - prefix atomic { - def compile(address, builder, parent_expression=nil) - prefix.compile(address, builder, self) - end - - def prefixed_expression - atomic - end - - def inline_modules - atomic.inline_modules - end - - def inline_module_name - nil - end - } - / - prefix space? predicate_block { - def compile(address, builder, parent_expression=nil) - prefix.compile(address, builder, self) - end - def prefixed_expression - predicate_block - end - def inline_modules - [] - end - } - / - atomic suffix node_class_declarations { - def compile(address, builder, parent_expression=nil) - suffix.compile(address, builder, self) - end - - def optional_expression - atomic - end - - def node_class_name - node_class_declarations.node_class_name - end - - def inline_modules - atomic.inline_modules + node_class_declarations.inline_modules - end - - def inline_module_name - node_class_declarations.inline_module_name - end - } - / - atomic node_class_declarations { - def compile(address, builder, parent_expression=nil) - atomic.compile(address, builder, self) - end - - def node_class_name - node_class_declarations.node_class_name - end - - def inline_modules - atomic.inline_modules + node_class_declarations.inline_modules - end - - def inline_module_name - node_class_declarations.inline_module_name - end - } - end - - rule labeled_sequence_primary - label sequence_primary { - def compile(lexical_address, builder) - sequence_primary.compile(lexical_address, builder) - end - - def inline_modules - sequence_primary.inline_modules - end - - def label_name - if label.name - label.name - elsif sequence_primary.instance_of?(Nonterminal) - sequence_primary.text_value - else - nil - end - end - } - end - - rule label - (alpha_char alphanumeric_char*) ':' { - def name - elements[0].text_value - end - } - / - '' { - def name - nil - end - } - end - - rule sequence_primary - prefix atomic { - def compile(lexical_address, builder) - prefix.compile(lexical_address, builder, self) - end - - def prefixed_expression - elements[1] - end - - def inline_modules - atomic.inline_modules - end - - def inline_module_name - nil - end - } - / - prefix space? predicate_block { - def compile(address, builder, parent_expression=nil) - prefix.compile(address, builder, self) - end - def prefixed_expression - predicate_block - end - def inline_modules - [] - end - } - / - atomic suffix { - def compile(lexical_address, builder) - suffix.compile(lexical_address, builder, self) - end - - def node_class_name - nil - end - - def inline_modules - atomic.inline_modules - end - - def inline_module_name - nil - end - } - / - atomic - end - - rule suffix - repetition_suffix / optional_suffix - end - - rule optional_suffix - '?' 
- end - - rule node_class_declarations - node_class_expression trailing_inline_module { - def node_class_name - node_class_expression.node_class_name - end - - def inline_modules - trailing_inline_module.inline_modules - end - - def inline_module - trailing_inline_module.inline_module - end - - def inline_module_name - inline_module.module_name if inline_module - end - } - end - - rule repetition_suffix - '+' / '*' / occurrence_range - end - - rule occurrence_range - space? min:([0-9])* '..' max:([0-9])* - end - - rule prefix - '&' / '!' / '~' - end - - rule atomic - terminal - / - nonterminal - / - parenthesized_expression - end - - rule parenthesized_expression - '(' space? parsing_expression space? ')' { - def inline_modules - parsing_expression.inline_modules - end - } - end - - rule nonterminal - !keyword_inside_grammar (alpha_char alphanumeric_char*) - end - - rule terminal - quoted_string / character_class / anything_symbol - end - - rule quoted_string - (single_quoted_string / double_quoted_string) { - def string - super.text_value - end - } - end - - rule double_quoted_string - '"' string:(!'"' ("\\\\" / '\"' / .))* '"' - end - - rule single_quoted_string - "'" string:(!"'" ("\\\\" / "\\'" / .))* "'" - end - - rule character_class - '[' characters:(!']' ('\\' . / bracket_expression / !'\\' .))+ ']' { - def characters - super.text_value - end - } - end - - rule bracket_expression - '[:' '^'? ( - 'alnum' / 'alpha' / 'blank' / 'cntrl' / 'digit' / 'graph' / 'lower' / - 'print' / 'punct' / 'space' / 'upper' / 'xdigit' / 'word' - ) ':]' - end - - rule anything_symbol - '.' - end - - rule node_class_expression - space '<' (!'>' .)+ '>' { - def node_class_name - elements[2].text_value - end - } - / - '' { - def node_class_name - nil - end - } - end - - rule trailing_inline_module - space inline_module { - def inline_modules - [inline_module] - end - - def inline_module_name - inline_module.module_name - end - } - / - '' { - def inline_modules - [] - end - - def inline_module - nil - end - - def inline_module_name - nil - end - } - end - - rule predicate_block - '' inline_module - end - - rule inline_module - '{' (inline_module / ![{}] .)* '}' - end - - rule keyword_inside_grammar - ('rule' / 'end') !non_space_char - end - - rule non_space_char - !space . - end - - rule alpha_char - [A-Za-z_] - end - - rule alphanumeric_char - alpha_char / [0-9] - end - - rule space - (white / comment_to_eol)+ - end - - rule comment_to_eol - '#' (!"\n" .)* - end - - rule white - [ \t\n\r] - end - end - end -end diff --git a/tests/examplefiles/minehunt.qml b/tests/examplefiles/minehunt.qml deleted file mode 100644 index 548e7e89..00000000 --- a/tests/examplefiles/minehunt.qml +++ /dev/null @@ -1,112 +0,0 @@ - /**************************************************************************** - ** - ** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies). - ** All rights reserved. - ** Contact: Nokia Corporation (qt-info@nokia.com) - ** - ** This file is part of the QtDeclarative module of the Qt Toolkit. - ** - ** $QT_BEGIN_LICENSE:LGPL$ - ** GNU Lesser General Public License Usage - ** This file may be used under the terms of the GNU Lesser General Public - ** License version 2.1 as published by the Free Software Foundation and - ** appearing in the file LICENSE.LGPL included in the packaging of this - ** file. Please review the following information to ensure the GNU Lesser - ** General Public License version 2.1 requirements will be met: - ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. 
- ** - ** In addition, as a special exception, Nokia gives you certain additional - ** rights. These rights are described in the Nokia Qt LGPL Exception - ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. - ** - ** GNU General Public License Usage - ** Alternatively, this file may be used under the terms of the GNU General - ** Public License version 3.0 as published by the Free Software Foundation - ** and appearing in the file LICENSE.GPL included in the packaging of this - ** file. Please review the following information to ensure the GNU General - ** Public License version 3.0 requirements will be met: - ** http://www.gnu.org/copyleft/gpl.html. - ** - ** Other Usage - ** Alternatively, this file may be used in accordance with the terms and - ** conditions contained in a signed written agreement between you and Nokia. - ** - ** - ** - ** - ** - ** $QT_END_LICENSE$ - ** - ****************************************************************************/ - - import QtQuick 1.0 - import "MinehuntCore" 1.0 - - Item { - id: field - property int clickx: 0 - property int clicky: 0 - - width: 450; height: 450 - - Image { source: "MinehuntCore/pics/background.png"; anchors.fill: parent; fillMode: Image.Tile } - - Grid { - anchors.horizontalCenter: parent.horizontalCenter - columns: 9; spacing: 1 - - Repeater { - id: repeater - model: tiles - delegate: Tile {} - } - } - - Row { - id: gamedata - x: 20; spacing: 20 - anchors.bottom: field.bottom; anchors.bottomMargin: 15 - - Image { - source: "MinehuntCore/pics/quit.png" - scale: quitMouse.pressed ? 0.8 : 1.0 - smooth: quitMouse.pressed - y: 10 - MouseArea { - id: quitMouse - anchors.fill: parent - anchors.margins: -20 - onClicked: Qt.quit() - } - } - Column { - spacing: 2 - Image { source: "MinehuntCore/pics/bomb-color.png" } - Text { anchors.horizontalCenter: parent.horizontalCenter; color: "white"; text: numMines } - } - - Column { - spacing: 2 - Image { source: "MinehuntCore/pics/flag-color.png" } - Text { anchors.horizontalCenter: parent.horizontalCenter; color: "white"; text: numFlags } - } - } - - Image { - anchors.bottom: field.bottom; anchors.bottomMargin: 15 - anchors.right: field.right; anchors.rightMargin: 20 - source: isPlaying ? 'MinehuntCore/pics/face-smile.png' : - hasWon ? 'MinehuntCore/pics/face-smile-big.png': 'MinehuntCore/pics/face-sad.png' - - MouseArea { anchors.fill: parent; onPressed: reset() } - } - Text { - anchors.centerIn: parent; width: parent.width - 20 - horizontalAlignment: Text.AlignHCenter - wrapMode: Text.WordWrap - text: "Minehunt demo has to be compiled to run.\n\nPlease see README." - color: "white"; font.bold: true; font.pixelSize: 14 - visible: tiles == undefined - } - - } diff --git a/tests/examplefiles/minimal.ns2 b/tests/examplefiles/minimal.ns2 deleted file mode 100644 index e8a92693..00000000 --- a/tests/examplefiles/minimal.ns2 +++ /dev/null @@ -1,4 +0,0 @@ -class A = ( | a = self m. | ) ( - m = (^a isNil ifTrue: [0] ifFalse: [1]) -) -class B = C ( | b0 = 0. b1 = b0 + 1. 
| ) () diff --git a/tests/examplefiles/modula2_test_cases.def b/tests/examplefiles/modula2_test_cases.def deleted file mode 100644 index ce86a55b..00000000 --- a/tests/examplefiles/modula2_test_cases.def +++ /dev/null @@ -1,354 +0,0 @@ -(* Test Cases for Modula-2 Lexer *) - -(* Notes: - (1) Without dialect option nor embedded dialect tag, the lexer operates in - fallback mode, recognising the *combined* literals, punctuation symbols - and operators of all supported dialects, and the *combined* reserved - words and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10. - (1) If multiple embedded dialect tags are present, the lexer will use the - first valid tag and ignore any subsequent dialect tags in the file. - (2) An embedded dialect tag overrides any command line dialect option. *) - - -(* Testing command line dialect option *) - -(* for PIM Modula-2 : pygmentize -O full,dialect=m2pim ... - for ISO Modula-2 : pygmentize -O full,dialect=m2iso ... - for Modula-2 R10 : pygmentize -O full,dialect=m2r10 ... - for Objective Modula-2 : pygmentize -O full,dialect=objm2 ... *) - -(* for Aglet extensions : pygmentize -O full,dialect=m2iso+aglet ... - for GNU extensions : pygmentize -O full,dialect=m2pim+gm2 ... - for p1 extensions : pygmentize -O full,dialect=m2iso+p1 ... - for XDS extensions : pygmentize -O full,dialect=m2iso+xds ... - - -(* Testing embedded dialect tags *) - -(* !m2pim*) (* <-- remove whitespace before ! for PIM Modula-2 *) -(* !m2iso*) (* <-- remove whitespace before ! for ISO Modula-2 *) -(* !m2r10*) (* <-- remove whitespace before ! for Modula-2 R10 *) -(* !objm2*) (* <-- remove whitespace before ! for Objective Modula-2 *) - -(* !m2iso+aglet*) (* <-- remove whitespace before ! for Aglet extensions *) -(* !m2pim+gm2*) (* <-- remove whitespace before ! for GNU extensions *) -(* !m2iso+p1*) (* <-- remove whitespace before ! for p1 extensions *) -(* !m2iso+xds*) (* <-- remove whitespace before ! for XDS extensions *) - - -(* Dialect Indicating Names *) - -(* recognised names should be highlighted *) - -QUALIFIED (* PIM and ISO *) - -PACKEDSET (* ISO only *) - -ARGLIST (* M2 R10 and ObjM2 *) - -BYCOPY (* ObjM2 only *) - -BITSET8 (* Aglet, GNU and M2 R10 *) - -__FILE__ (* GNU only *) - -BCD (* p1 and M2 R10 *) - -SEQ (* XDS only *) - - -(* Literal Tests *) - -(* recognised literals should be rendered as one unit - unrecognised literals should be rendered as error *) - -ch := 'a'; ch := "a"; (* all dialects *) -ch := 0u20; unich := 0u2038 (* M2 R10 *) - -s := 'The cat said "meow!".'; -s := "It is eight O'clock."; - - -n := 123; n = 1000000; (* all dialects *) -n := 123; n = 1'000'000; (* M2 R10 *) - -n := 0b0110; n:= 0b0110'1100'0111; (* M2 R10 *) -n := 0xFF00; n:= 0xDEAD'BEEF'0F00; (* M2 R10 *) - -r := 1.23; r := 1000000.000001; (* all dialects *) -r := 1.23; r := 1'000'000.000'001; (* M2 R10 *) - -r := 1.234E6; r:= 1.234E-6; r := 1.234567E1000; (* PIM + ISO *) -r := 1.234e6; r:= 1.234e-6; r := 1.234'567e1'000; (* M2 R10 *) - -ch := 0377C; n := 0377B; n := 07FF0H; (* ISO + PIM *) - - -(* Non-Alphabetic Operator Tests *) - -(* supported operators should be rendered as one unit - unsupported operators should be rendered as errors *) - -a := b + c - d * e / f; (* all dialects *) - -SetDiff := A \ B; (* M2 R10 *) - -dotProduct := v1 *. v2; catArray := array1 +> array2; (* M2 R10 *) - -bool := a = b; bool := a > b; bool := a < b; -bool := a # b; bool := a >= b; bool := a <= b; - -bool := a <> b; (* PIM + ISO *) - -bool := a == b; (* M2 R10 *) - -(*&*) IF a & b THEN ... 
END; (* PIM + ISO *) - -(*~*) IF ~ b THEN ... END; (* PIM + ISO *) - -(*::*) int := real :: INTEGER; (* M2 R10 *) - -(*++*) FOR i++ IN range DO ... END; (* M2 R10 *) -(*--*) FOR i-- IN range DO ... END; (* M2 R10 *) - -(*^*) next := this^.next; (* all dialects *) -(*@*) next := this@.next; (* ISO *) - -(*`*) str := `NSString alloc init; (* ObjM2 *) - - -(* Punctuation Tests *) - -(* supported punctuation should be rendered as one unit - unsupported punctuation should be rendered as an error *) - -(*.*) Foo.Bar.Baz; (*..*) TYPE Sign = [-1..1] OF INTEGER; - -(*|:*) CASE foo OF | 1 : bar | 2 : bam | 3 : boo END; -(*!:*) CASE foo OF 1 : bar ! 2 : bam ! 3 : boo END; (* ISO *) - -(*[]()*) array[n] := foo(); - -(*{}*) CONST Bar = { 1, 2, 3 }; - -(*?*) TPROPERTIES = isCollection, isIndexed | isRigid?; (* M2 R10 *) - -(*~*) CONST ~ isFoobar = Foo AND Bar; (* M2 R10 *) -(*->*) isFoobar -> PROCEDURE [ABS]; (* M2 R10 *) - -(*<<>>*) GENLIB Foo FROM Template FOR Bar = <> END; (* M2 R10 *) - - -(* Single Line Comment Test *) - -(* should be rendered as comment if supported, as error if unsupported *) - -// This is a single line comment (M2 R10 + ObjM2) - - -(* Pragma Delimiter Tests *) - -(* PIM style pragma should be rendered as pragma in PIM dialects, - as multiline comment in all other dialects. *) - -(*$INLINE*) (* PIM *) - -(* ISO style pragma should be rendered as error in PIM dialects, - as pragma in all other dialects. *) - -<*INLINE*> (* all other dialects *) - - -(* Operator Substitution Test When in Algol mode *) - -IF foo # bar THEN ... END; (* # should be rendered as not equal symbol *) - -IF foo >= bar THEN ... END; (* >= should be rendered as not less symbol *) - -IF foo <= bar THEN ... END; (* <= should be rendered as not greater symbol *) - -IF foo == bar THEN ... END; (* == should be rendered as identity symbol *) - -dotProduct := v1 *. v2; (* *. 
should be rendered as dot product symbol *) - - -(* Reserved Words and Builtins Test *) - -(* supported reserved words and builtins should be highlighted *) - -(* reserved words common to all dialects *) - -AND ARRAY BEGIN BY CASE CONST DEFINITION DIV DO ELSE ELSIF END EXIT FOR FROM -IF IMPLEMENTATION IMPORT IN LOOP MOD MODULE NOT OF OR POINTER PROCEDURE -RECORD REPEAT RETURN SET THEN TO TYPE UNTIL VAR WHILE - -(* builtins common to all dialects *) - -ABS BOOLEAN CARDINAL CHAR CHR FALSE INTEGER LONGINT LONGREAL -MAX MIN NIL ODD ORD REAL TRUE - -(* pseudo builtins common to all dialects *) - -ADDRESS BYTE WORD ADR - - -(* additional reserved words for PIM *) - -EXPORT QUALIFIED WITH - -(* additional builtins for PIM *) - -BITSET CAP DEC DISPOSE EXCL FLOAT HALT HIGH INC INCL NEW NIL PROC SIZE TRUNC VAL - -(* additional pseudo-builtins for PIM *) - -SYSTEM PROCESS TSIZE NEWPROCESS TRANSFER - - -(* additional reserved words for ISO 10514-1 *) - -EXCEPT EXPORT FINALLY FORWARD PACKEDSET QUALIFIED REM RETRY WITH - -(* additional reserved words for ISO 10514-2 & ISO 10514-3 *) - -ABSTRACT AS CLASS GUARD INHERIT OVERRIDE READONLY REVEAL TRACED UNSAFEGUARDED - -(* additional builtins for ISO 10514-1 *) - -BITSET CAP CMPLX COMPLEX DEC DISPOSE EXCL FLOAT HALT HIGH IM INC INCL INT -INTERRUPTIBLE LENGTH LFLOAT LONGCOMPLEX NEW PROC PROTECTION RE SIZE TRUNC -UNINTERRUBTIBLE VAL - -(* additional builtins for ISO 10514-2 & ISO 10514-3 *) - -CREATE DESTROY EMPTY ISMEMBER SELF - - -(* additional pseudo-builtins for ISO *) - -(* SYSTEM *) -SYSTEM BITSPERLOC LOCSPERBYTE LOCSPERWORD LOC ADDADR SUBADR DIFADR MAKEADR -ADR ROTATE SHIFT CAST TSIZE - -(* COROUTINES *) -COROUTINES ATTACH COROUTINE CURRENT DETACH HANDLER INTERRUPTSOURCE IOTRANSFER -IsATTACHED LISTEN NEWCOROUTINE PROT TRANSFER - -(* EXCEPTIONS *) -EXCEPTIONS AllocateSource CurrentNumber ExceptionNumber ExceptionSource -GetMessage IsCurrentSource IsExceptionalExecution RAISE - -(* TERMINATION *) -TERMINATION IsTerminating HasHalted - -(* M2EXCEPTION *) -M2EXCEPTION M2Exceptions M2Exception IsM2Exception indexException rangeException -caseSelectException invalidLocation functionException wholeValueException -wholeDivException realValueException realDivException complexValueException -complexDivException protException sysException coException exException - - -(* additional reserved words for M2 R10 *) - -ALIAS ARGLIST BLUEPRINT COPY GENLIB INDETERMINATE NEW NONE OPAQUE REFERENTIAL -RELEASE RETAIN - -(* with symbolic assembler language extension *) -ASM REG - -(* additional builtins for M2 R10 *) - -CARDINAL COUNT EMPTY EXISTS INSERT LENGTH LONGCARD OCTET PTR PRED READ READNEW -REMOVE RETRIEVE SORT STORE SUBSET SUCC TLIMIT TMAX TMIN TRUE TSIZE UNICHAR -WRITE WRITEF - -(* additional pseudo-builtins for M2 R10 *) - -(* TPROPERTIES *) -TPROPERTIES PROPERTY LITERAL TPROPERTY TLITERAL TBUILTIN TDYN TREFC TNIL -TBASE TPRECISION TMAXEXP TMINEXP - -(* CONVERSION *) -CONVERSION TSXFSIZE SXF VAL - -(* UNSAFE *) -UNSAFE CAST INTRINSIC AVAIL ADD SUB ADDC SUBC FETCHADD FETCHSUB SHL SHR ASHR -ROTL ROTR ROTLC ROTRC BWNOT BWAND BWOR BWXOR BWNAND BWNOR SETBIT TESTBIT -LSBIT MSBIT CSBITS BAIL HALT TODO FFI ADDR VARGLIST VARGC - -(* ATOMIC *) -ATOMIC INTRINSIC AVAIL SWAP CAS INC DEC BWAND BWNAND BWOR BWXOR - -(* COMPILER *) -COMPILER DEBUG MODNAME PROCNAME LINENUM DEFAULT HASH - -(* ASSEMBLER *) -ASSEMBLER REGISTER SETREG GETREG CODE - - -(* standard library ADT identifiers for M2 R10 *) - -(* rendered as builtins when dialect is set to Modula-2 R10, - this can be turned off 
by option treat_stdlib_adts_as_builtins=off *) -BCD LONGBCD BITSET SHORTBITSET LONGBITSET LONGLONGBITSET COMPLEX LONGCOMPLEX -SHORTCARD LONGLONGCARD SHORTINT LONGLONGINT POSINT SHORTPOSINT LONGPOSINT -LONGLONGPOSINT BITSET8 BITSET16 BITSET32 BITSET64 BITSET128 BS8 BS16 BS32 -BS64 BS128 CARDINAL8 CARDINAL16 CARDINAL32 CARDINAL64 CARDINAL128 CARD8 -CARD16 CARD32 CARD64 CARD128 INTEGER8 INTEGER16 INTEGER32 INTEGER64 -INTEGER128 INT8 INT16 INT32 INT64 INT128 STRING UNISTRING - - -(* additional reserved words for ObjM2 *) - -(* Note: ObjM2 is a superset of M2 R10 *) - -BYCOPY BYREF CLASS CONTINUE CRITICAL INOUT METHOD ON OPTIONAL OUT PRIVATE -PROTECTED PROTOCOL PUBLIC SUPER TRY - -(* additional builtins for ObjM2 *) - -OBJECT NO YES - - -(* additional builtins for Aglet Extensions to ISO *) - -BITSET8 BITSET16 BITSET32 CARDINAL8 CARDINAL16 CARDINAL32 INTEGER8 INTEGER16 -INTEGER32 - - -(* additional reserved words for GNU Extensions to PIM *) - -ASM __ATTRIBUTE__ __BUILTIN__ __COLUMN__ __DATE__ __FILE__ __FUNCTION__ -__LINE__ __MODULE__ VOLATILE - -(* additional builtins for GNU Extensions to PIM *) - -BITSET8 BITSET16 BITSET32 CARDINAL8 CARDINAL16 CARDINAL32 CARDINAL64 COMPLEX32 -COMPLEX64 COMPLEX96 COMPLEX128 INTEGER8 INTEGER16 INTEGER32 INTEGER64 REAL8 -REAL16 REAL32 REAL96 REAL128 THROW - - -(* additional pseudo-builtins for p1 Extensions to ISO *) - -BCD - - -(* additional reserved words for XDS Extensions to ISO *) - -SEQ - -(* additional builtins for XDS Extensions to ISO *) - -ASH ASSERT DIFFADR_TYPE ENTIER INDEX LEN LONGCARD SHORTCARD SHORTINT - -(* additional pseudo-builtins for XDS Extensions to ISO *) - -(* SYSTEM *) -PROCESS NEWPROCESS BOOL8 BOOL16 BOOL32 CARD8 CARD16 CARD32 INT8 INT16 INT32 -REF MOVE FILL GET PUT CC int unsigned size_t void - -(* COMPILER *) -COMPILER OPTION EQUATION - - -(* end of file *) \ No newline at end of file diff --git a/tests/examplefiles/moin_SyntaxReference.txt b/tests/examplefiles/moin_SyntaxReference.txt deleted file mode 100644 index a88fea4c..00000000 --- a/tests/examplefiles/moin_SyntaxReference.txt +++ /dev/null @@ -1,340 +0,0 @@ -## Please edit system and help pages ONLY in the moinmaster wiki! For more -## information, please see MoinMaster:MoinPagesEditorGroup. -##master-page:Unknown-Page -##master-date:Unknown-Date -#acl MoinPagesEditorGroup:read,write,delete,revert All:read -#format wiki -#language en - -This page aims to introduce the most important elements of MoinMoin``'s syntax at a glance, showing first the markup verbatim and then how it is rendered by the wiki engine. Additionally, you'll find links to the relative help pages. Please note that some of the features depend on your configuration. 
- -= Table of Contents = -{{{ -'''Contents''' (up to the 2nd level) -[[TableOfContents(2)]] -}}} -'''Contents''' (up to the 2nd level) -[[TableOfContents(2)]] - -= Headings = -'''''see:''' HelpOnHeadlines'' -{{{ -= heading 1st level = -== heading 2nd level == -=== heading 3rd level === -==== heading 4th level ==== -===== heading 5th level ===== -}}} -= heading 1st level = -== heading 2nd level == -=== heading 3rd level === -==== heading 4th level ==== -===== heading 5th level ===== - -= Text Formatting = -'''''see:''' HelpOnFormatting'' -{{{ - * ''emphasized (italics)'' - * '''boldface''' - * '''''bold italics''''' - * `monospace` - * {{{source code}}} - * __underline__ - * ,,sub,,script - * ^super^script - * ~-smaller-~ - * ~+larger+~ - * --(strike through)-- -}}} - * ''emphasized (italics)'' - * '''boldface''' - * '''''bold italics''''' - * `monospace` - * {{{source code}}} - * __underline__ - * ,,sub,,script - * ^super^script - * ~-smaller-~ - * ~+larger+~ - * --(strike through)-- - -= Hyperlinks = -'''''see:''' HelpOnLinking'' -== Internal Links == -{{{ - * FrontPage - * ["FrontPage"] - * HelpOnEditing/SubPages - * /SubPage - * ../SiblingPage - * [:FrontPage:named link] - * [#anchorname] - * [#anchorname description] - * [wiki:Self:PageName#anchorname] - * [wiki:Self:PageName#anchorname description] - * attachment:filename.txt -}}} - * FrontPage - * ["FrontPage"] - * HelpOnEditing/SubPages - * /SubPage - * ../SiblingPage - * [:FrontPage:named link] - * [#anchorname] - * [#anchorname description] - * [wiki:Self:PageName#anchorname] - * [wiki:Self:PageName#anchorname description] - * attachment:filename.txt - -== External Links == -{{{ - * http://moinmoin.wikiwikiweb.de/ - * [http://moinmoin.wikiwikiweb.de/] - * [http://moinmoin.wikiwikiweb.de/ MoinMoin Wiki] - * [http://moinmoin.wikiwikiweb.de/wiki/moinmoin.png] - * http://moinmoin.wikiwikiweb.de/wiki/moinmoin.png - * [http://moinmoin.wikiwikiweb.de/wiki/moinmoin.png moinmoin.png] - * MeatBall:InterWiki - * wiki:MeatBall/InterWiki - * [wiki:MeatBall/InterWiki] - * [wiki:MeatBall/InterWiki InterWiki page on MeatBall] - * [file://///servername/share/full/path/to/file/filename%20with%20spaces.txt link to file filename with spaces.txt] - * user@example.com -}}} - * http://moinmoin.wikiwikiweb.de/ - * [http://moinmoin.wikiwikiweb.de/] - * [http://moinmoin.wikiwikiweb.de/ MoinMoin Wiki] - * [http://moinmoin.wikiwikiweb.de/wiki/moinmoin.png] - * http://moinmoin.wikiwikiweb.de/wiki/moinmoin.png - * [http://moinmoin.wikiwikiweb.de/wiki/moinmoin.png moinmoin.png] - * MeatBall:InterWiki - * wiki:MeatBall/InterWiki - * [wiki:MeatBall/InterWiki] - * [wiki:MeatBall/InterWiki InterWiki page on MeatBall] - * [file://///servername/share/full/path/to/file/filename%20with%20spaces.txt link to file filename with spaces.txt] - * user@example.com - -== Avoid or Limit Automatical Linking == -{{{ - * Wiki''''''Name - * Wiki``Name - * !WikiName - * WikiName''''''s - * WikiName``s - * `http://www.example.com` -}}} - * Wiki''''''Name - * Wiki``Name - * !WikiName - * WikiName''''''s - * WikiName``s - * `http://www.example.com` - -= Blockquotes and Indentions = -{{{ - indented text - text indented to the 2nd level -}}} - indented text - text indented to the 2nd level - -= Lists = -'''''see:''' HelpOnLists'' -== Unordered Lists == -{{{ - * item 1 - - * item 2 (preceding white space) - * item 2.1 - * item 2.1.1 - * item 3 - . item 3.1 (bulletless) - . item 4 (bulletless) - * item 4.1 - . 
item 4.1.1 (bulletless) -}}} - * item 1 - - * item 2 (preceding white space) - * item 2.1 - * item 2.1.1 - * item 3 - . item 3.1 (bulletless) - . item 4 (bulletless) - * item 4.1 - . item 4.1.1 (bulletless) - -== Ordered Lists == -=== with Numbers === -{{{ - 1. item 1 - 1. item 1.1 - 1. item 1.2 - 1. item 2 -}}} - 1. item 1 - 1. item 1.1 - 1. item 1.2 - 1. item 2 - -=== with Roman Numbers === -{{{ - I. item 1 - i. item 1.1 - i. item 1.2 - I. item 2 -}}} - I. item 1 - i. item 1.1 - i. item 1.2 - I. item 2 - -=== with Letters === -{{{ - A. item A - a. item A. a) - a. item A. b) - A. item B -}}} - A. item A - a. item A. a) - a. item A. b) - A. item B - -== Definition Lists == -{{{ - term:: definition - object:: description 1 - :: description 2 - Action Items:: - :: First Item - :: Second Item -}}} - term:: definition - object:: description 1 - :: description 2 - Action Items:: - :: First Item - :: Second Item - -= Horizontal Rules = -'''''see:''' HelpOnRules'' -{{{ ----- ------ ------- -------- --------- ---------- ----------- -}}} ----- ------ ------- -------- --------- ---------- ----------- - -= Tables = -'''''see:''' HelpOnTables'' -== Tables == -{{{ -||'''A'''||'''B'''||'''C'''|| -||1 ||2 ||3 || -}}} -||'''A'''||'''B'''||'''C'''|| -||1 ||2 ||3 || - -== Cell Width == -{{{ -||minimal width ||<99%>maximal width || -}}} -||minimal width ||<99%>maximal width || - -== Spanning Rows and Columns == -{{{ -||<|2> cell spanning 2 rows ||cell in the 2nd column || -||cell in the 2nd column of the 2nd row || -||<-2> cell spanning 2 columns || -||||use empty cells as a shorthand || -}}} -||<|2> cell spanning 2 rows ||cell in the 2nd column || -||cell in the 2nd column of the 2nd row || -||<-2> cell spanning 2 columns || -||||use empty cells as a shorthand || - -== Alignment of Cell Contents == -{{{ -||<^|3> top (combined) ||<:99%> center (combined) || bottom (combined) || -||<)> right || -||<(> left || -}}} -||<^|3> top (combined) ||<:99%> center (combined) || bottom (combined) || -||<)> right || -||<(> left || - -== Coulored Table Cells == -{{{ -||<#0000FF> blue ||<#00FF00> green ||<#FF0000> red || -||<#00FFFF> cyan ||<#FF00FF> magenta ||<#FFFF00> yellow || -}}} -||<#0000FF> blue ||<#00FF00> green ||<#FF0000> red || -||<#00FFFF> cyan ||<#FF00FF> magenta ||<#FFFF00> yellow || - -== HTML-like Options for Tables == -{{{ -||A || like <|2> || -|| like <#00FF00> || -|| like <-2>|| -}}} -||A || like <|2> || -|| like <#00FF00> || -|| like <-2>|| - -= Macros and Variables = -== Macros == -'''''see:''' HelpOnMacros'' - * `[[Anchor(anchorname)]]` inserts a link anchor `anchorname` - * `[[BR]]` inserts a hard line break - * `[[FootNote(Note)]]` inserts a footnote saying `Note` - * `[[Include(HelpOnMacros/Include)]]` inserts the contents of the page `HelpOnMacros/Include` inline - * `[[MailTo(user AT example DOT com)]]` obfuscates the email address `user@example.com` to users not logged in - -== Variables == -'''''see:''' HelpOnVariables'' - * `@``SIG``@` inserts your login name and timestamp of modification - * `@``TIME``@` inserts date and time of modification - -= Smileys and Icons = -'''''see:''' HelpOnSmileys'' -[[ShowSmileys]] - -= Source code = -'''''see:''' HelpOnParsers'' -== Verbatim Display == -{{{ -{ { { -def hello(): - print "Hello World!" -} } } -}}} -/!\ Remove spaces between "`{ { {`" and "`} } }`". -{{{ -def hello(): - print "Hello World!" -}}} - -== Syntax Highlighting == -{{{ -{ { {#!python -def hello(): - print "Hello World!" -} } } -}}} -/!\ Remove spaces between "`{ { {`" and "`} } }`". 
-{{{#!python -def hello(): - print "Hello World!" -}}} - diff --git a/tests/examplefiles/multiline_regexes.rb b/tests/examplefiles/multiline_regexes.rb deleted file mode 100644 index 1b1e7612..00000000 --- a/tests/examplefiles/multiline_regexes.rb +++ /dev/null @@ -1,38 +0,0 @@ -/ -this is a -multiline -regex -/ - -this /is a -multiline regex too/ - -foo = /is also -one/ - -also /4 -is one/ - -this(/ -too -/) - -# this not -2 /4 -asfsadf/ - -# this is also not one -0x4d /25 -foo/ - -42 and /this -is also a multiline -regex/ - - -# And here some special string cases -foo = % blah # comment here to ensure whitespace -foo(% blah ) -foo << % blah # stupid but has to work -foo = % blah + % blub # wicked -foo = %q wicked # works too diff --git a/tests/examplefiles/nanomsg.intr b/tests/examplefiles/nanomsg.intr deleted file mode 100644 index d21f62cc..00000000 --- a/tests/examplefiles/nanomsg.intr +++ /dev/null @@ -1,95 +0,0 @@ -module: nanomsg -synopsis: generated bindings for the nanomsg library -author: Bruce Mitchener, Jr. -copyright: See LICENSE file in this distribution. - -define simple-C-mapped-subtype () - export-map , export-function: identity; -end; - -define interface - #include { - "sp/sp.h", - "sp/fanin.h", - "sp/inproc.h", - "sp/pair.h", - "sp/reqrep.h", - "sp/survey.h", - "sp/fanout.h", - "sp/ipc.h", - "sp/pubsub.h", - "sp/tcp.h" - }, - - exclude: { - "SP_HAUSNUMERO", - "SP_PAIR_ID", - "SP_PUBSUB_ID", - "SP_REQREP_ID", - "SP_FANIN_ID", - "SP_FANOUT_ID", - "SP_SURVEY_ID" - }, - - equate: {"char *" => }, - - rename: { - "sp_recv" => %sp-recv, - "sp_send" => %sp-send, - "sp_setsockopt" => %sp-setsockopt - }; - - function "sp_version", - output-argument: 1, - output-argument: 2, - output-argument: 3; - - function "sp_send", - map-argument: { 2 => }; - - function "sp_recv", - map-argument: { 2 => }; - -end interface; - -// Function for adding the base address of the repeated slots of a -// to an offset and returning the result as a . This is -// necessary for passing contents across the FFI. - -define function buffer-offset - (the-buffer :: , data-offset :: ) - => (result-offset :: ) - u%+(data-offset, - primitive-wrap-machine-word - (primitive-repeated-slot-as-raw - (the-buffer, primitive-repeated-slot-offset(the-buffer)))) -end function; - -define inline function sp-send (socket :: , data :: , flags :: ) => (res :: ) - %sp-send(socket, buffer-offset(data, 0), data.size, flags) -end; - -define inline function sp-recv (socket :: , data :: , flags :: ) => (res :: ) - %sp-recv(socket, buffer-offset(data, 0), data.size, flags); -end; - -define inline method sp-setsockopt (socket :: , level :: , option :: , value :: ) - with-stack-structure (int :: ) - pointer-value(int) := value; - let setsockopt-result = - %sp-setsockopt(socket, level, option, int, size-of()); - if (setsockopt-result < 0) - // Check error! - end; - setsockopt-result - end; -end; - -define inline method sp-setsockopt (socket :: , level :: , option :: , data :: ) - let setsockopt-result = - %sp-setsockopt(socket, level, option, as(, data), data.size); - if (setsockopt-result < 0) - // Check error! 
- end; - setsockopt-result -end; diff --git a/tests/examplefiles/nasm_aoutso.asm b/tests/examplefiles/nasm_aoutso.asm deleted file mode 100644 index 9fd9727e..00000000 --- a/tests/examplefiles/nasm_aoutso.asm +++ /dev/null @@ -1,96 +0,0 @@ -; test source file for assembling to NetBSD/FreeBSD a.out shared library -; build with: -; nasm -f aoutb aoutso.asm -; ld -Bshareable -o aoutso.so aoutso.o -; test with: -; cc -o aoutso aouttest.c aoutso.so -; ./aoutso - -; This file should test the following: -; [1] Define and export a global text-section symbol -; [2] Define and export a global data-section symbol -; [3] Define and export a global BSS-section symbol -; [4] Define a non-global text-section symbol -; [5] Define a non-global data-section symbol -; [6] Define a non-global BSS-section symbol -; [7] Define a COMMON symbol -; [8] Define a NASM local label -; [9] Reference a NASM local label -; [10] Import an external symbol -; [11] Make a PC-relative call to an external symbol -; [12] Reference a text-section symbol in the text section -; [13] Reference a data-section symbol in the text section -; [14] Reference a BSS-section symbol in the text section -; [15] Reference a text-section symbol in the data section -; [16] Reference a data-section symbol in the data section -; [17] Reference a BSS-section symbol in the data section - - BITS 32 - EXTERN __GLOBAL_OFFSET_TABLE_ - GLOBAL _lrotate:function ; [1] - GLOBAL _greet:function ; [1] - GLOBAL _asmstr:data _asmstr.end-_asmstr ; [2] - GLOBAL _textptr:data 4 ; [2] - GLOBAL _selfptr:data 4 ; [2] - GLOBAL _integer:data 4 ; [3] - EXTERN _printf ; [10] - COMMON _commvar 4 ; [7] - - SECTION .text - -; prototype: long lrotate(long x, int num); -_lrotate: ; [1] - push ebp - mov ebp,esp - mov eax,[ebp+8] - mov ecx,[ebp+12] -.label rol eax,1 ; [4] [8] - loop .label ; [9] [12] - mov esp,ebp - pop ebp - ret - -; prototype: void greet(void); -_greet push ebx ; we'll use EBX for GOT, so save it - call .getgot -.getgot: pop ebx - add ebx,__GLOBAL_OFFSET_TABLE_ + $$ - .getgot wrt ..gotpc - mov eax,[ebx+_integer wrt ..got] ; [14] - mov eax,[eax] - inc eax - mov [ebx+localint wrt ..gotoff],eax ; [14] - mov eax,[ebx+_commvar wrt ..got] - push dword [eax] - mov eax,[ebx+localptr wrt ..gotoff] ; [13] - push dword [eax] - mov eax,[ebx+_integer wrt ..got] ; [1] [14] - push dword [eax] - lea eax,[ebx+_printfstr wrt ..gotoff] - push eax ; [13] - call _printf wrt ..plt ; [11] - add esp,16 - pop ebx - ret - - SECTION .data - -; a string -_asmstr db 'hello, world', 0 ; [2] -.end - -; a string for Printf -_printfstr db "integer==%d, localint==%d, commvar=%d" - db 10, 0 - -; some pointers -localptr dd localint ; [5] [17] -_textptr dd _greet wrt ..sym ; [15] -_selfptr dd _selfptr wrt ..sym ; [16] - - SECTION .bss - -; an integer -_integer resd 1 ; [3] - -; a local integer -localint resd 1 ; [6] diff --git a/tests/examplefiles/nasm_objexe.asm b/tests/examplefiles/nasm_objexe.asm deleted file mode 100644 index dcae5eed..00000000 --- a/tests/examplefiles/nasm_objexe.asm +++ /dev/null @@ -1,30 +0,0 @@ -; Demonstration of how to write an entire .EXE format program as a .OBJ -; file to be linked. Tested with the VAL free linker. 
-; To build: -; nasm -fobj objexe.asm -; val objexe.obj,objexe.exe; -; To test: -; objexe -; (should print `hello, world') - - segment code - -..start: mov ax,data - mov ds,ax - mov ax,stack - mov ss,ax - mov sp,stacktop - - mov dx,hello - mov ah,9 - int 0x21 - - mov ax,0x4c00 - int 0x21 - - segment data -hello: db 'hello, world', 13, 10, '$' - - segment stack stack - resb 64 -stacktop: diff --git a/tests/examplefiles/nemerle_sample.n b/tests/examplefiles/nemerle_sample.n deleted file mode 100644 index 5236857d..00000000 --- a/tests/examplefiles/nemerle_sample.n +++ /dev/null @@ -1,87 +0,0 @@ -using System; - -namespace Demo.Ns -{ - /// sample class - public class ClassSample : Base - { - /* sample multiline comment */ -#region region sample - fieldSample : int; -#endregion - - public virtual someMethod(str : string) : list[double] - { - def x = "simple string"; - def x = $"simple $splice string $(spliceMethod() + 1)"; - def x = <# - recursive <# string #> sample - #>; - def x = $<# - recursive $splice <# string #> sample - ..$(lst; "; "; x => $"x * 2 = $(x * 2)") str - #>; - def x = @"somestring \"; - - def localFunc(arg) - { - arg + 1; - } - - match (localFunc(2)) - { - | 3 => "ok"; - | _ => "fail"; - } - - using (x = SomeObject()) - { - foreach (item in someCollection) - { - def i = try - { - int.Parse(item) - } - catch - { - | _ is FormatException => 0; - } - when (i > 0xff) - unless (i < 555L) - WriteLine(i); - - } - } - protected override overrideSample() : void - {} - - private privateSample() : void - {} - - public abstract abstractSample() : void - {} - } - - } - - module ModuleSample - { - } - - variant RgbColor { - | Red - | Yellow - | Green - | Different { - red : float; - green : float; - blue : float; - } - } - - macro sampleMacro(expr) - syntax ("write", expr) - { - <[ WriteLine($(expr : dyn)) ]> - } -} diff --git a/tests/examplefiles/nginx_nginx.conf b/tests/examplefiles/nginx_nginx.conf deleted file mode 100644 index 9dcdc8ab..00000000 --- a/tests/examplefiles/nginx_nginx.conf +++ /dev/null @@ -1,118 +0,0 @@ - -#user nobody; -worker_processes 1; - -#error_log logs/error.log; -#error_log logs/error.log notice; -#error_log logs/error.log info; - -#pid logs/nginx.pid; - - -events { - worker_connections 1024; -} - - -http { - include mime.types; - default_type application/octet-stream; - - log_format main '$remote_addr - $remote_user [$time_local] $request ' - '"$status" $body_bytes_sent "$http_referer" ' - '"$http_user_agent" "$http_x_forwarded_for"'; - - #access_log logs/access.log main; - - sendfile on; - #tcp_nopush on; - - #keepalive_timeout 0; - keepalive_timeout 65; - - #gzip on; - - server { - listen 80; - server_name localhost; - - charset koi8-r; - - #access_log logs/host.access.log main; - - location / { - root html; - index index.html index.htm; - } - - #error_page 404 /404.html; - - # redirect server error pages to the static page /50x.html - # - error_page 500 502 503 504 /50x.html; - location = /50x.html { - root html; - } - - # proxy the PHP scripts to Apache listening on 127.0.0.1:80 - # - location ~ \.php$ { - proxy_pass http://127.0.0.1; - } - - # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000 - # - location ~ \.php$ { - root html; - fastcgi_pass 127.0.0.1:9000; - fastcgi_index index.php; - fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name; - include fastcgi_params; - } - - # deny access to .htaccess files, if Apache's document root - # concurs with nginx's one - # - location ~ /\.ht { - deny all; - } - } - - - # another virtual 
host using mix of IP-, name-, and port-based configuration - # - server { - listen 8000; - listen somename:8080; - server_name somename alias another.alias; - - location / { - root html; - index index.html index.htm; - } - } - - - # HTTPS server - # - server { - listen 443; - server_name localhost; - - ssl on; - ssl_certificate cert.pem; - ssl_certificate_key cert.key; - - ssl_session_timeout 5m; - - ssl_protocols SSLv2 SSLv3 TLSv1; - ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv2:+EXP; - ssl_prefer_server_ciphers on; - - location / { - root html; - index index.html index.htm; - } - } - -} diff --git a/tests/examplefiles/noexcept.cpp b/tests/examplefiles/noexcept.cpp deleted file mode 100644 index f83e50db..00000000 --- a/tests/examplefiles/noexcept.cpp +++ /dev/null @@ -1,8 +0,0 @@ -void* operator new (std::size_t size); -void* operator new (std::size_t size, const std::nothrow_t& nothrow_value) noexcept; -void* operator new (std::size_t size, const std::nothrow_t& nothrow_value)noexcept; -void* operator new (std::size_t size, const std::nothrow_t& nothrow_value); -void* operator new (std::size_t size); -void* operator new (std::size_t size) noexcept; -void* operator new (std::size_t size)noexcept; - diff --git a/tests/examplefiles/numbers.c b/tests/examplefiles/numbers.c deleted file mode 100644 index 80662ead..00000000 --- a/tests/examplefiles/numbers.c +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Some Number Test - */ - -int i = 24241424; -float f1 = 342423423.24234; -float f2 = 25235235.; -float f3 = .234234; -float f4 = 234243e+34343; -float f5 = 24234e-234; -int o = 0234; -int h = 0x2342; diff --git a/tests/examplefiles/objc_example.m b/tests/examplefiles/objc_example.m deleted file mode 100644 index f3f85f65..00000000 --- a/tests/examplefiles/objc_example.m +++ /dev/null @@ -1,179 +0,0 @@ -// Test various types of includes -#import -# import -#import "stdio.h" -#\ - import \ - "stdlib.h" -# /*line1*/ \ -import /* line 2 */ \ -"stdlib.h" // line 3 - -// Commented out code with preprocessor -#if 0 -#define MY_NUMBER 3 -#endif - - #\ - if 1 -#define TEST_NUMBER 3 -#endif - -// Empty preprocessor -# - -// Class forward declaration -@class MyClass; - -// Empty classes -@interface EmptyClass -@end -@interface EmptyClass2 -{ -} -@end -@interface EmptyClass3 : EmptyClass2 -{ -} -@end - -// Custom class inheriting from built-in -@interface MyClass : NSObject -{ -@public - NSString *myString; - __weak NSString *_weakString; -@protected - NSTextField *_textField; -@private - NSDate *privateDate; -} - -// Various property aatributes -@property(copy, readwrite, nonatomic) NSString *myString; -@property(weak) NSString *weakString; -@property(retain, strong, atomic) IBOutlet NSTextField *textField; - -// Class methods -+ (void)classMethod1:(NSString *)arg; -+ (void)classMethod2:(NSString *) arg; // Test space before arg - -@end - -typedef id B; - -#pragma mark MyMarker - -// MyClass.m -// Class extension to declare private property -@interface MyClass () -@property(retain) NSDate *privateDate; -- (void)hiddenMethod; -@end - -// Special category -@interface MyClass (Special) -@property(retain) NSDate *specialDate; -@end - -@implementation MyClass -@synthesize myString; -@synthesize privateDate; - -- (id)a:(B)b { - /** - * C-style comment - */ - - // Selector keywords/types - SEL someMethod = @selector(hiddenMethod); - - // Boolean types - Boolean b1 = FALSE; - BOOL b2 = NO; - bool b3 = true; - - /** - * Number literals - */ - // Int Literal - NSNumber *n1 = @( 1 ); - // Method call - 
NSNumber *n2 = @( [b length] ); - // Define variable - NSNumber *n3 = @( TEST_NUMBER ); - // Arthimetic expression - NSNumber *n4 = @(1 + 2); - // From variable - int myInt = 5; - NSNumber *n5 = @(myInt); - // Nest expression - NSNumber *n6 = @(1 + (2 + 6.0)); - // Bool literal - NSNumber *n7 = @NO; - // Bool expression - NSNumber *n8 = @(YES); - // Character - NSNumber *n9 = @'a'; - // int - NSNumber *n10 = @123; - // unsigned - NSNumber *n11 = @1234U; - // long - NSNumber *n12 = @1234567890L; - // float - NSNumber *n13 = @3.14F; - // double - NSNumber *n14 = @3.14F; - - // Array literals - NSArray *arr = @[ @"1", @"2" ]; - arr = @[ @[ @"1", @"2" ], [arr lastObject] ]; - [arr lastObject]; - [@[ @"1", @"2" ] lastObject]; - - // Dictionary literals - NSDictionary *d = @{ @"key": @"value" }; - [[d allKeys] lastObject]; - [[@{ @"key": @"value" } allKeys] lastObject]; - d = @{ @"key": @{ @"key": @"value" } }; - - [self hiddenMethod]; - [b length]; - [privateDate class]; - - NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys: - @"1", @"one", @"2", @"two", @"3", @"three", nil]; - - NSString *key; - for (key in dictionary) { - NSLog(@"Number: %@, Word: %@", key, [dictionary valueForKey:key]); - } - - // Blocks - int (^myBlock)(int arg1, int arg2); - NSString *(^myName)(NSString *) = ^(NSString *value) { - return value; - }; - - return nil; -} - -- (void)hiddenMethod { - // Synchronized block - @synchronized(self) { - [myString retain]; - [myString release]; - } -} - -+ (void)classMethod1:(NSString *)arg {} -+ (void)classMethod2:(NSString *) arg -{ - // Autorelease pool block - @autoreleasepool { - NSLog(@"Hello, World!"); - } -} - -@end diff --git a/tests/examplefiles/openedge_example b/tests/examplefiles/openedge_example deleted file mode 100644 index e8c17e33..00000000 --- a/tests/examplefiles/openedge_example +++ /dev/null @@ -1,34 +0,0 @@ -{include.i} -{nested.i {include.i}} - -&SCOPED-DEFINE MY_NAME "Abe" - -DEF VAR i AS INT NO-UNDO. -i = 0xABE + 1337 / (1 * 1.00) - -def var clowercasetest as char no-undo. -DEF VAR vardashtest AS DATETIME-TZ NO-UNDO. - -DEFINE TEMP-TABLE ttNames NO-UNDO - FIELD cName AS CHAR - INDEX IXPK_ttNames IS PRIMARY UNIQUE cName. - -/* One-line comment */ -/* Two-line - Comment */ -/* - Nested - /* - Multiline - /* - Comment - */ - */ -*/ - -CREATE ttNames. -ASSIGN ttNames.cName = {&MY_NAME}. - -FOR EACH ttNames: - MESSAGE "Hello, " + ttNames.cName + '!' VIEW-AS ALERT-BOX. -END. diff --git a/tests/examplefiles/pacman.conf b/tests/examplefiles/pacman.conf deleted file mode 100644 index 78dbf5e1..00000000 --- a/tests/examplefiles/pacman.conf +++ /dev/null @@ -1,49 +0,0 @@ -# -# /etc/pacman.conf -# -# This example file has no relation to `pacman.ijs` -# but is of configuration of Arch Linux's package manager `pacman`. 
-# - -# -# GENERAL OPTIONS -# -[options] -RootDir = /opt/local/site-private -#DBPath = /var/lib/pacman/ -#CacheDir = /var/cache/pacman/pkg/ -LogFile = /opt/local/site-private/var/log/pacman.log -#GPGDir = /etc/pacman.d/gnupg/ -HoldPkg = pacman -#XferCommand = /usr/bin/curl -C - -f %u > %o -XferCommand = /usr/local/bin/wget --passive-ftp -c -O %o %u -#CleanMethod = KeepInstalled -#UseDelta = 0.7 -Architecture = auto - -#IgnorePkg = -#IgnoreGroup = - -NoUpgrade = etc/passwd etc/group etc/shadow -NoUpgrade = etc/fstab -#NoExtract = - -#UseSyslog -Color -#TotalDownload -CheckSpace -#VerbosePkgLists - -#SigLevel = Never -SigLevel = Required DatabaseOptional -LocalFileSigLevel = Optional -RemoteFileSigLevel = Required - -Server = ftp://ftp9.yaphatchpotchgen.net/$repo/os/$arch - -[fubar32] -Include = /etc/pacman.d/mirrorlist.fubar32 # comment is allowed here - -#[custom] -#SigLevel = Optional TrustAll -#Server = file:///home/custompkgs diff --git a/tests/examplefiles/pacman.ijs b/tests/examplefiles/pacman.ijs deleted file mode 100644 index f067b6e2..00000000 --- a/tests/examplefiles/pacman.ijs +++ /dev/null @@ -1,1107 +0,0 @@ -cocurrent 'jpacman' -coinsert 'j' - -BASELIB=: 'base library' -DATAMASK=: 0 -HWNDP=: '' -ISGUI=: 0 -ONLINE=: 0 -PKGDATA=: 0 7$a: -SECTION=: ,<'All' -SYSNAME=: 'Package Manager' -TIMEOUT=: 60 -WWWREV=: REV=: _1 - -IgnoreIOS=: 0 : 0 -api/jni -data/dbman -data/ddmysql -data/odbc -demos/isigraph -demos/wd -demos/wdplot -games/minesweeper -games/nurikabe -games/pousse -games/solitaire -general/pcall -general/sfl -graphics/d3 -graphics/fvj3 -graphics/gl2 -graphics/gnuplot -graphics/graph -graphics/graphviz -graphics/jturtle -graphics/print -graphics/tgsj -graphics/treemap -graphics/viewmat -gui/monthview -gui/util -ide/qt -math/tabula -media/animate -media/gdiplus -media/image3 -media/ming -media/paint -media/wav -) - -Ignore=: 3 : 0'' -if. IFIOS do. - <;._2 IgnoreIOS -else. - <'ide/ios' -end. -) -3 : 0'' -nc=. '--no-cache' -if. IFUNIX do. - if. UNAME-:'Darwin' do. - HTTPCMD=: 'curl -o %O --stderr %L -f -s -S %U' - elseif. do. - if. 'Android'-:UNAME do. nc=. '' - else. try. nc=. nc #~ 1 e. nc E. shell 'wget --help' catch. nc=. '' end. end. - HTTPCMD=: 'wget ',nc,' -O %O -o %L -t %t %U' - end. -else. - if. fexist exe=. jpath '~tools/ftp/wget.exe' do. exe=. '"',exe,'"' else. exe=. 'wget.exe' end. - try. nc=. nc #~ 1 e. nc E. shell exe,' --help' catch. nc=. '' end. - HTTPCMD=: exe,' ',nc,' -O %O -o %L -t %t -T %T %U' - if. fexist UNZIP=: jpath '~tools/zip/unzip.exe' do. UNZIP=: '"',UNZIP,'" -o -C ' else. UNZIP=: 'unzip.exe -o -C ' end. -end. -) -setfiles=: 3 : 0 -ADDCFG=: jpath '~addons/config/' -makedir ADDCFG -ADDCFGIJS=: ADDCFG,'config.ijs' -JRELEASE=: ({.~i.&'/') 9!:14'' -JRELEASE=: 'j802' -LIBTREE=: readtree'' -if. IFIOS do. - WWW=: '/jal/',JRELEASE,'/' -else. - WWW=: 'http://www.jsoftware.com/jal/',JRELEASE,'/' -end. -LIBVER=: jpath '~system/config/version.txt' -) -destroy=: codestroy -CFGFILES=: <;._2 (0 : 0) -addons.txt -library.txt -release.txt -revision.txt -zips.txt -) -LIBDESC=: 0 : 0 -This is the base library of scripts and labs included in the J system. - -Reinstalling or upgrading this library will overwrite files in the system subdirectory. Restart J afterwards. - -Files outside the system subdirectory, such as profile.ijs, are not changed. -) -cutjal=: ([: (* 4 > +/\) ' ' = ]) <;._1 ] -cutjsp=: ([: (* 5 > +/\) ' ' = ]) <;._1 ] -dquote=: '"'&, @ (,&'"') -fname=: #~ ([: *./\. ~:&'/') -hostcmd=: [: 2!:0 '(' , ] , ' || true)'"_ -ischar=: 2 = 3!:0 -rnd=: [ * [: <. 
0.5 + %~ -sep2under=: '/' & (I.@('_' = ])}) -termLF=: , (0 < #) # LF -. {: -todel=: ; @: (DEL&, @ (,&(DEL,' ')) each) -tolist=: }. @ ; @: (LF&,@,@":each) -isjpkgout=: ((4 = {:) *. 2 = #)@$ *. 1 = L. -getintro=: ('...' ,~ -&3@[ {. ])^:(<#) -info=: smoutput -getnames=: 3 : 0 -select. L.y -case. 0 do. - if. +/ BASELIB E. y do. - y=. ({:"1 y -y=. (45&getintro &.> idx{y) idx}y -) -deltree=: 3 : 0 -try. - res=. 0< ferase {."1 dirtree y - *./ res,0 #y do. i.0 5 return. end. -m=. _2 |. (LF,')',LF) E. y -r=. _2 }. each m <;._2 y -x=. r i.&> LF -d=. (x+1) }.each r -r=. x {.each r -r=. 3 {."1 cutjal &> ' ' ,each r -x=. d i.&> LF -c=. x {.each d -d=. (x+1) }.each d -r,.c,.d -) -fixjal2=: 3 : 0 -if. 2 > #y do. i.0 2 return. end. -cutjal &> ' ' ,each <;._2 y -) -fixjsp=: 3 : 0 -if. 2 > #y do. i.0 5 return. end. -m=. _2 |. (LF,')',LF) E. y -r=. _2 }. each m <;._2 y -x=. r i.&> LF -d=. (x+1) }.each r -r=. x {.each r -r=. ' ' ,each r -(cutjsp &> r),.d -) -fixlib=: 3 : 0 -msk=. ( #y do. - i.0 6 return. -end. -fls=. <;._2 y -ndx=. fls i.&> ' ' -siz=. <&> 0 ". (ndx+1) }.&> fls -fls=. ndx {.each fls -zps=. <;._2 &> fls ,each '_' -pfm=. 3 {"1 zps -uname=. tolower UNAME -msk=. (uname -: ({.~ i.&'.')) &> pfm -if. 1 ~: +/msk do. msk=. 1,~ }:0*.msk end. -msk # zps,.fls,.siz -) -fixrev=: 3 : 0 -{. _1 ". :: _1: y -. CRLF -) -fixupd=: 3 : 0 -_1 ". :: _1: y -. CRLF -) -fixver=: 3 : 0 -if. ischar y do. - y=. y -. CRLF - y=. 0 ". ' ' (I. y='.') } y -end. -3 {. y -) -fixvers=: 3 : 0 -s=. $y -y=. ,y -3 {."1 [ 0 ". s $ ' ' (I. y e. './') } y -) -fmtjal=: 3 : 0 -if. 0 = #y do. '' return. end. -r=. (4 {."1 y) ,each "1 ' ',LF2 -r=. <@; "1 r -; r ,each ({:"1 y) ,each <')',LF -) -fmtjal2=: 3 : 0 -if. 0 = #y do. '' return. end. -; (2 {."1 y) ,each "1 ' ',LF -) -fmtdep=: 3 : 0 -}. ; ',' ,each a: -.~ <;._2 y -) -fmtjsp=: 3 : 0 -if. 0 = #y do. '' return. end. -r=. (4 {."1 y) ,each "1 ' ',LF -r=. <@; "1 r -; r ,each ({:"1 y) ,each <')',LF -) -fmtlib=: 3 : 0 -, 'q<.>,q<.>r<0>3.0,r<0>3.0' 8!:2 y -) -fmtver=: 3 : 0 -if. 0=#y do. '' return. end. -if. ischar y do. y return. end. -}. ; '.' ,each ": each y -) -fmtverlib=: 3 : 0 -fmtver y -) -fixzips=: 3 : 0 -if. 2 > #y do. i.0 5 return. end. -fls=. <;._2 y -ndx=. fls i.&> ' ' -siz=. 0 ". (ndx+1) }.&> fls -fls=. ndx {.each fls -zps=. <;._2 &> fls ,each '_' -zps=. zps,.fls,.<&>siz -pfm=. 3 {"1 zps -and=. (1 e. 'android'&E.) &> pfm -lnx=. (1 e. 'linux'&E.) &> pfm -mac=. (1 e. 'darwin'&E.) &> pfm -win=. mac < (1 e. 'win'&E.) &> pfm - -select. UNAME -case. 'Win' do. - zps=. win # zps -case. 'Linux' do. - zps=. lnx # zps -case. 'Android' do. - zps=. and # zps -case. 'Darwin' do. - zps=. mac # zps - zps=. zps /: 3 {"1 zps - zps=. (~: 3 {."1 zps) # zps -end. - -bit=. IF64 pick '64';'32' -pfm=. 3 {"1 zps -exc=. (1 e. bit&E.) &> pfm -zps=. zps \: exc -zps=. (~: 3 {."1 zps) # zps -fnm=. 0 {"1 zps -lnm=. 1 {"1 zps -ver=. 2 {"1 zps -pfm=. 3 {"1 zps -fls=. 4 {"1 zps -siz=. 5 {"1 zps -nms=. fnm ,each '/' ,each lnm -pfm=. (pfm i.&> '.') {.each pfm -ndx=. \: # &> pfm -sort ndx { nms,.pfm,.ver,.fls,.siz -) -fwritenew=: 4 : 0 -if. x -: fread y do. - 0 -else. - x fwrite y -end. -) -platformparent=: 3 : 0 -((< _2 {. y) e. '32';'64') # _2 }. y -) -makedir=: 1!:5 :: 0: @ < -plural=: 4 : 0 -y,(1=x)#'s' -) -sizefmt=: 3 : 0 -select. +/ y >: 1e3 1e4 1e6 1e7 1e9 -case. 0 do. - (": y), ' byte',(y~:1)#'s' -case. 1 do. - (": 0.1 rnd y%1e3),' KB' -case. 2 do. - (": 1 rnd y%1e3),' KB' -case. 3 do. - (": 0.1 rnd y%1e6),' MB' -case. 4 do. - (": 1 rnd y%1e6),' MB' -case. do. - (": 0.1 rnd y%1e9),' GB' -end. 
-) -shellcmd=: 3 : 0 -if. IFUNIX do. - hostcmd y -else. - spawn_jtask_ y -end. -) -subdir=: 3 : 0 -if. 0=#y do. '' return. end. -a=. 1!:0 y,'*' -if. 0=#a do. '' return. end. -a=. a #~ '-d' -:"1 [ 1 4 {"1 > 4 {"1 a -( '/mnt/sdcard'-:2!:5'EXTERNAL_STORAGE' do. notarcmd=. 1 end. - end. - if. notarcmd do. - require 'tar' - 'file dir'=. y - if. (i.0 0) -: tar 'x';file;dir do. e=. '' end. - else. - e=. shellcmd 'tar ',((IFIOS+:UNAME-:'Android')#(('Darwin'-:UNAME){::'--no-same-owner --no-same-permissions';'-o -p')),' -xzf ',file,' -C ',dir - end. - if. (0~:FHS) *. ('root'-:2!:5'USER') +. (<2!:5'HOME') e. 0;'/var/root';'/root';'';,'/' do. - shellcmd ::0: 'find ',dir,' -type d -exec chmod a+rx {} \+' - shellcmd ::0: 'find ',dir,' -type f -exec chmod a+r {} \+' - end. -else. - dir=. (_2&}. , '/' -.~ _2&{.) dir - e=. shellcmd UNZIP,' ',file,' -d ',dir -end. -e -) -zipext=: 3 : 0 -y, IFUNIX pick '.zip';'.tar.gz' -) -CHECKADDONSDIR=: 0 : 0 -The addons directory does not exist and cannot be created. - -It is set to: XX. - -You can either create the directory manually, or set a new addons directory in your profile script. -) -CHECKASK=: 0 : 0 -Read catalog from the server using Internet connection now? - -Otherwise the local catalog is used offline. -) -CHECKONLINE=: 0 : 0 -An active Internet connection is needed to install packages. - -Continue only if you have an active Internet connection. - -OK to continue? -) -CHECKREADSVR=: 0 : 0 -An active Internet connection is needed to read the server repository catalog. - -Continue only if you have an active Internet connection. - -OK to continue? -) -CHECKSTARTUP=: 0 : 0 -Setup repository using Internet connection now? - -Select No if not connected, to complete setup later. After Setup is done, repository can be used offline with more options in Tools menu and Preferences dialog. -) -checkaccess=: 3 : 0 -if. testaccess'' do. 1 return. end. -msg=. 'Unable to run Package Manager, as you do not have access to the installation folder.' -if. IFWIN do. - msg=. msg,LF2,'To run as Administrator, right-click the J icon, select Run as... and ' - msg=. msg,'then select Adminstrator.' -end. -info msg -0 -) -checkaddonsdir=: 3 : 0 -d=. jpath '~addons' -if. # 1!:0 d do. 1 return. end. -if. 1!:5 :: 0: : 0 do. - ONLINE=: 0 - log 'Using local copy of catalog. See Preferences to change the setting.' - 1 return. - end. - if. 0 = getonline 'Read Catalog from Server';CHECKREADSVR do. 0 return. end. -case. 1 do. - ONLINE=: 1 -case. 2 do. - if. REV >: 0 do. - if. 0 = getonline 'Read Catalog from Server';CHECKASK do. - log 'Using local copy of catalog. See Preferences to change the setting.' - 1 return. - end. - else. - if. 0 = getonline 'Setup Repository';CHECKSTARTUP do. 0 return. end. - end. -end. -log 'Updating server catalog...' -if. 0 = getserver'' do. - ONLINE=: 0 - log 'Working offline using local copy of catalog.' -else. - log 'Done.' -end. -1 -) -checkstatus=: 3 : 0 -if. 0 e. #LIBS do. '' return. end. -msk=. masklib PKGDATA -ups=. pkgups'' -libupm=. 1 e. msk *. ups -msk=. -. msk -addnim=. +/msk *. pkgnew'' -addupm=. +/msk *. pkgups'' -tot=. +/addnim,addupm,libupm -if. 0 = tot do. - 'All available packages are installed and up to date.' return. -end. -select. 0 < addnim,addupm -case. 0 0 do. - msg=. 'Addons are up to date.' -case. 0 1 do. - msg=. 'All addons are installed, ',(":addupm), ' can be upgraded.' -case. 1 0 do. - if. addnim = <:#PKGDATA do. - msg=. 'No addons are installed.' - else. - j=. ' addon',('s'#~1: fsize p do. - if. _1-:msg=. freads q do. - if. 0=#msg=. 
e do. msg=. 'Unexpected error' end. end. - log 'Connection failed: ',msg - info 'Connection failed:',LF2,msg - r=. 1;msg - ferase p;q -else. - r=. 0;p - ferase q -end. -r -) -httpgetr=: 3 : 0 -res=. httpget y -if. 0 = 0 pick res do. - f=. 1 pick res - txt=. freads f - ferase f - 0;txt -end. -) -install=: 3 : 0 -dat=. getdepend y -'num siz'=. pmview_applycounts dat -many=. 1 < num -msg=. 'Installing ',(":num),' package',many#'s' -msg=. msg,' of ',(many#'total '),'size ',sizefmt siz -log msg -installdo 1 {"1 dat -log 'Done.' -readlocal'' -pacman_init 0 -) -install_console=: 3 : 0 - if. -. init_console 'server' do. '' return. end. - pkgs=. getnames y - if. pkgs -: ,<'all' do. pkgs=. 1 {"1 PKGDATA end. - pkgs=. pkgs (e. # [) ~. (<'base library'), ((pkgnew +. pkgups) # 1&{"1@]) PKGDATA - pkgs=. pkgs -. Ignore - pkgs=. getdepend_console pkgs - if. 0 = num=. #pkgs do. '' return. end. - many=. 1 < num - msg=. 'Installing ',(":num),' package',many#'s' - log msg - installdo pkgs - log 'Done.' - readlocal'' - pacman_init '' - checkstatus'' -) -upgrade_console=: 3 : 0 - if. -. init_console 'read' do. '' return. end. - pkgs=. getnames y - if. (0=#pkgs) +. pkgs -: ,<'all' do. pkgs=. 1{"1 PKGDATA end. - pkgs=. pkgs (e. # [) (pkgups # 1&{"1@])PKGDATA - install_console pkgs -) -installdo=: 3 : 0 -msk=. -. y e. :fsize jpath'~addons/',y,'/manifest.ijs' do. - log 'Extraction failed: ',msg - info 'Extraction failed:',LF2,msg - return. -end. -install_addins y -install_config y -) -install_addins=: 3 :0 -fl=. ADDCFG,'addins.txt' -ins=. fixjal2 freads fl -ins=. ins #~ ( txt -msk=. fexist &> ( msk # 1 {"1 PKGDATA) ,. res - res=. (2#LF) joinstring (70&foldtext)&.> res - end. - case. 'showinstalled' do. - dat=. (isjpkgout y) {:: (1 2 3 4 {"1 PKGDATA); (pkgs) ,&.> <'/',x,(x-:'history'){::'.ijs';'.txt' - res=. res #~ msk=. (<_1) ~: res=. fread@jpath &.> fn - if. #res do. - res=. ,((<'== '), &.> msk#pkgs) ,. res - res=. (2#LF) joinstring res - end. -) -remove_console=: 3 : 0 - if. -. init_console 'edit' do. '' return. end. - pkgs=. getnames y - if. pkgs -: ,<'all' do. pkgs=. 1 {"1 PKGDATA end. - pkgs=. pkgs (e. # [) (-.@pkgnew # 1&{"1@]) PKGDATA - pkgs=. pkgs -. . fixver freads LIBVER -) -readlocal=: 3 : 0 -readlin'' -ADDONS=: fixjal freads ADDCFG,'addons.txt' -ADDINS=: fixjal2 freads ADDCFG,'addins.txt' -REV=: fixrev freads ADDCFG,'revision.txt' -LASTUPD=: fixupd freads ADDCFG,'lastupdate.txt' -LIBS=: fixlibs freads ADDCFG,'library.txt' -LIB=: fixlib LIBS -ZIPS=: fixzips freads ADDCFG,'zips.txt' -EMPTY -) -readtree=: 3 : 0 -f=. ADDCFG,'tree.txt' -tree=. LF -.~ freads f -if. -. (d),'manifest.ijs' - if. mft -: _1 do. continue. end. - VERSION=: '' - 0!:100 mft - ver=. fmtver fixver VERSION - n=. }: (#p) }. >d - n=. '/' (I.n='\') } n - r=. r,n,' ',ver,LF - s=. s,d -end. -r fwritenew f -s=. (#p) }.each }: each s -install_labs each s -write_config'' -) -refreshjal=: 3 : 0 -'rc p'=. httpget WWW,zipext 'jal' -if. rc do. 0 return. end. -unzip p;ADDCFG -ferase p -if. *./ CFGFILES e. {."1 [ 1!:0 ADDCFG,'*' do. 1 return. end. -msg=. 'Could not install the local repository catalog.' -log msg -info msg -0 -) -updatejal=: 3 : 0 - log 'Updating server catalog...' - if. -. init_console 'server' do. '' return. end. - refreshaddins'' - readlocal'' - pacman_init'' - res=. checklastupdate'' - res,LF,checkstatus'' -) -RELIBMSG=: 0 : 0 -You are now using the XX base library, and can switch to the YY base library. - -This will download the YY version of the base library and overwrite existing files. Addons are not affected. 
- -OK to switch to the YY library? -) -prelib=: 3 : 0 -old=. LIBTREE -new=. (('stable';'current') i. (2-s) {"1 dat -srv=. fixvers > (3-s) {"1 dat -{."1 /:"2 srv ,:"1 loc -) -pkgnew=: 3 : 0 -dat=. (s=.isjpkgout y){:: PKGDATA; (2-s) {"1 dat -) -pkgups=: pkgnew < pkglater -pkgsearch=: 3 : 0 - +./"1 +./ y E."1&>"(0 _) 1{"1 PKGDATA -) -pkgshow=: 3 : 0 - y e.~ 1{"1 PKGDATA -) -setshowall=: 3 : 0 -PKGDATA=: ( '/') {.each nms -SECTION=: 'All';nms -DATAMASK=: (#PKGDATA) $ 1 -EMPTY -) -init_console=: 3 : 0 - if. 0=#y do. y=. 'read' end. - select. y - fcase. 'edit';'server' do. - if. -. checkaccess'' do. 0 return. end. - case. 'read' do. - if. -. checkaddonsdir'' do. 0 return. end. - setfiles'' - readlocal'' - pacman_init '' - res=. 1 - case. do. res=. 0 - end. - if. y -: 'server' do. res=. getserver'' end. - res -) -jpkg=: 4 : 0 - select. x - case. 'history';'manifest' do. - x showfiles_console y - case. 'install' do. - install_console y - case. 'reinstall' do. - remove_console y - install_console y - case. 'remove' do. - remove_console y - case. ;:'show search showinstalled shownotinstalled showupgrade status' do. - x show_console y - case. 'update' do. - updatejal '' - case. 'upgrade' do. - upgrade_console y - case. do. - msg=. 'Valid options are:',LF - msg=. msg,' history, install, manifest, remove, reinstall, show, search,',LF - msg=. msg,' showinstalled, shownotinstalled, showupgrade, status,',LF - msg,' update, upgrade' - end. -) -do_install=: 3 : 0 -if. -. checkaccess_jpacman_ '' do. return. end. -'update' jpkg '' -select. y -case. 'qtide';'angle' do. - 'install' jpkg 'base library ide/qt' - getqtbin (y-:'angle'){::0;'angle' - msg=. (+/ 2 1 * IFWIN,'Darwin'-:UNAME) pick 'jqt.sh';'the jqt icon';'jqt.cmd' - smoutput 'exit and restart J using ',msg -case. 'all' do. - 'install' jpkg 'all' - getqtbin 0 -end. -) -do_getqtbin=: 3 : 0 -smoutput 'Installing JQt binaries...' -if. 'Linux'-:UNAME do. - if. IFRASPI do. - z=. 'jqt-raspi-32.tar.gz' - else. - z=. 'jqt-',((y-:'slim') pick 'linux';'slim'),'-',(IF64 pick 'x86';'x64'),'.tar.gz' - end. - z1=. 'libjqt.so' -elseif. IFWIN do. - z=. 'jqt-win',((y-:'slim')#'slim'),'-',(IF64 pick 'x86';'x64'),'.zip' - z1=. 'jqt.dll' -elseif. do. - z=. 'jqt-mac',((y-:'slim')#'slim'),'-',(IF64 pick 'x86';'x64'),'.zip' - z1=. 'libjqt.dylib' -end. -'rc p'=. httpget_jpacman_ 'http://www.jsoftware.com/download/j802/qtide/',z -if. rc do. - smoutput 'unable to download: ',z return. -end. -d=. jpath '~bin' -if. IFWIN do. - unzip_jpacman_ p;d -else. - if. 'Linux'-:UNAME do. - if. (0~:FHS) do. - if. IFRASPI do. - d1=. '/usr/lib/arm-linux-gnueabihf/.' - elseif. IF64 do. - d1=. '/usr/lib/x86_64-linux-gnu/.' - elseif. do. - d1=. '/usr/lib/i386-linux-gnu/.' - end. - hostcmd_jpacman_ 'cd /usr/bin && tar --no-same-owner --no-same-permissions -xzf ',(dquote p), ' && chmod 755 jqt && chmod 644 libjqt.so && mv libjqt.so ',d1 - else. - hostcmd_jpacman_ 'cd ',(dquote d),' && tar xzf ',(dquote p) - end. - else. - hostcmd_jpacman_ 'unzip -o ',(dquote p),' -d ',dquote d - end. -end. -ferase p -if. #1!:0 ((0~:FHS)*.'Linux'-:UNAME){::(jpath '~bin/',z1);'/usr/bin/jqt' do. - m=. 'Finished install of JQt binaries.' -else. - m=. 'Unable to install JQt binaries.',LF - m=. m,'check that you have write permission for: ',LF,((0~:FHS)*.'Linux'-:UNAME){::(jpath '~bin');'/usr/bin' -end. -smoutput m -if. 'Linux'-:UNAME do. return. end. - -tgt=. jpath IFWIN{::'~install/Qt';'~bin/Qt5Core.dll' -y=. (*#y){::0;y -smoutput 'Installing Qt library...' -if. IFWIN do. - z=. 
'qt53-',((y-:'angle') pick 'win';'angle'),'-',((y-:'slim')#'slim-'),(IF64 pick 'x86';'x64'),'.zip' -else. - z=. 'qt53-mac-',((y-:'slim')#'slim-'),(IF64 pick 'x86';'x64'),'.zip' -end. -'rc p'=. httpget_jpacman_ 'http://www.jsoftware.com/download/j802/qtlib/',z -if. rc do. - smoutput 'unable to download: ',z return. -end. -d=. jpath IFWIN{::'~install';'~bin' -if. IFWIN do. - unzip_jpacman_ p;d -else. - hostcmd_jpacman_ 'unzip -o ',(dquote p),' -d ',dquote d -end. -ferase p -if. #1!:0 tgt do. - m=. 'Finished install of Qt binaries.' -else. - m=. 'Unable to install Qt binaries.',LF - m=. m,'check that you have write permission for: ',LF,IFWIN{::tgt;jpath'~bin' -end. -smoutput m - -) -jpkg_z_=: 3 : 0 - 'help' jpkg y - : - a=. conew 'jpacman' - res=. x jpkg__a y - destroy__a'' - res -) -jpkgv_z_=: (<@:>"1@|:^:(0 ~: #))@jpkg \ No newline at end of file diff --git a/tests/examplefiles/pawn_example b/tests/examplefiles/pawn_example deleted file mode 100644 index ee2ecca2..00000000 --- a/tests/examplefiles/pawn_example +++ /dev/null @@ -1,25 +0,0 @@ -{include.i} -{nested.i {include.i}} - -&SCOPED-DEFINE MY_NAME "Abe" - -DEF VAR i AS INT NO-UNDO. -i = 0xABE + 1337 / (1 * 1.00) - -def var clowercasetest as char no-undo. -DEF VAR vardashtest AS DATETIME-TZ NO-UNDO. - -DEFINE TEMP-TABLE ttNames NO-UNDO - FIELD cName AS CHAR - INDEX IXPK_ttNames IS PRIMARY UNIQUE cName. - -/* One-line comment */ -/* Two-line - Comment */ - -CREATE ttNames. -ASSIGN ttNames.cName = {&MY_NAME}. - -FOR EACH ttNames: - MESSAGE "Hello, " + ttNames.cName + '!' VIEW-AS ALERT-BOX. -END. diff --git a/tests/examplefiles/perl_misc b/tests/examplefiles/perl_misc deleted file mode 100644 index e6dbfb28..00000000 --- a/tests/examplefiles/perl_misc +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/perl - -# from http://gist.github.com/485595 -use strict; -use warnings; -use Time::HiRes 'usleep'; - -for (1..5) { - open my $in, '<', '/proc/sys/kernel/random/entropy_avail' or die; - print <$in>; - close $in; - usleep 100_000; -} - -# other miscellaneous tests of numbers separated by _ -#usleep 100_000; -100_000_000; -my $nichts = 0.005_006; -print "$nichts\n"; -my $nichts2 = 0.005_006_007; -print 900_800_700.005_006_007, $/; - -# numbers from `man 1 perlnumber` -my $n; -$n = 1234; # decimal integer -$n = 0b1110011; # binary integer -$n = 01234; # octal integer -$n = 0x1234; # hexadecimal integer -$n = 12.34e-56; # exponential notation -$n = "-12.34e56"; # number specified as a string -$n = "1234"; # number specified as a string - -# other numbers -for ( - -9876, - +8765, - -9876.02, - -9876.02e+10, - +765_432e30, - 2002., - .2002, -) { - print $_, "\n"; -} - -# operators on numbers -for ( - $n + 300, - $n - 300, - $n / 300 + 10, - $n * 250 / 2.0, - $n == 100, - $n != 100, - $n > 100, - $n >= 100, - $n < 100, - $n <= 100, - $n % 2, - abs $n, -) { - print $_, "\n"; -} diff --git a/tests/examplefiles/perl_perl5db b/tests/examplefiles/perl_perl5db deleted file mode 100644 index ab9d5e30..00000000 --- a/tests/examplefiles/perl_perl5db +++ /dev/null @@ -1,998 +0,0 @@ - -=head1 NAME - -perl5db.pl - the perl debugger - -=head1 SYNOPSIS - - perl -d your_Perl_script - -=head1 DESCRIPTION - -After this routine is over, we don't have user code executing in the debugger's -context, so we can use C freely. - -=cut - -############################################## Begin lexical danger zone - -# 'my' variables used here could leak into (that is, be visible in) -# the context that the code being evaluated is executing in. 
This means that -# the code could modify the debugger's variables. -# -# Fiddling with the debugger's context could be Bad. We insulate things as -# much as we can. - -sub eval { - - # 'my' would make it visible from user code - # but so does local! --tchrist - # Remember: this localizes @DB::res, not @main::res. - local @res; - { - - # Try to keep the user code from messing with us. Save these so that - # even if the eval'ed code changes them, we can put them back again. - # Needed because the user could refer directly to the debugger's - # package globals (and any 'my' variables in this containing scope) - # inside the eval(), and we want to try to stay safe. - local $otrace = $trace; - local $osingle = $single; - local $od = $^D; - - # Untaint the incoming eval() argument. - { ($evalarg) = $evalarg =~ /(.*)/s; } - - # $usercontext built in DB::DB near the comment - # "set up the context for DB::eval ..." - # Evaluate and save any results. - @res = eval "$usercontext $evalarg;\n"; # '\n' for nice recursive debug - - # Restore those old values. - $trace = $otrace; - $single = $osingle; - $^D = $od; - } - - # Save the current value of $@, and preserve it in the debugger's copy - # of the saved precious globals. - my $at = $@; - - # Since we're only saving $@, we only have to localize the array element - # that it will be stored in. - local $saved[0]; # Preserve the old value of $@ - eval { &DB::save }; - - # Now see whether we need to report an error back to the user. - if ($at) { - local $\ = ''; - print $OUT $at; - } - - # Display as required by the caller. $onetimeDump and $onetimedumpDepth - # are package globals. - elsif ($onetimeDump) { - if ( $onetimeDump eq 'dump' ) { - local $option{dumpDepth} = $onetimedumpDepth - if defined $onetimedumpDepth; - dumpit( $OUT, \@res ); - } - elsif ( $onetimeDump eq 'methods' ) { - methods( $res[0] ); - } - } ## end elsif ($onetimeDump) - @res; -} ## end sub eval - -############################################## End lexical danger zone - -# After this point it is safe to introduce lexicals. -# The code being debugged will be executing in its own context, and -# can't see the inside of the debugger. -# -# However, one should not overdo it: leave as much control from outside as -# possible. If you make something a lexical, it's not going to be addressable -# from outside the debugger even if you know its name. - -# This file is automatically included if you do perl -d. -# It's probably not useful to include this yourself. -# -# Before venturing further into these twisty passages, it is -# wise to read the perldebguts man page or risk the ire of dragons. -# -# (It should be noted that perldebguts will tell you a lot about -# the underlying mechanics of how the debugger interfaces into the -# Perl interpreter, but not a lot about the debugger itself. The new -# comments in this code try to address this problem.) - -# Note that no subroutine call is possible until &DB::sub is defined -# (for subroutines defined outside of the package DB). In fact the same is -# true if $deep is not defined. 
- -# Enhanced by ilya@math.ohio-state.edu (Ilya Zakharevich) - -# modified Perl debugger, to be run from Emacs in perldb-mode -# Ray Lischner (uunet!mntgfx!lisch) as of 5 Nov 1990 -# Johan Vromans -- upgrade to 4.0 pl 10 -# Ilya Zakharevich -- patches after 5.001 (and some before ;-) - -# (We have made efforts to clarify the comments in the change log -# in other places; some of them may seem somewhat obscure as they -# were originally written, and explaining them away from the code -# in question seems conterproductive.. -JM) - -=head1 DEBUGGER INITIALIZATION - -The debugger starts up in phases. - -=head2 BASIC SETUP - -First, it initializes the environment it wants to run in: turning off -warnings during its own compilation, defining variables which it will need -to avoid warnings later, setting itself up to not exit when the program -terminates, and defaulting to printing return values for the C command. - -=cut - -# Needed for the statement after exec(): -# -# This BEGIN block is simply used to switch off warnings during debugger -# compiliation. Probably it would be better practice to fix the warnings, -# but this is how it's done at the moment. - -BEGIN { - $ini_warn = $^W; - $^W = 0; -} # Switch compilation warnings off until another BEGIN. - -# test if assertions are supported and actived: -BEGIN { - $ini_assertion = eval "sub asserting_test : assertion {1}; 1"; - - # $ini_assertion = undef => assertions unsupported, - # " = 1 => assertions supported - # print "\$ini_assertion=$ini_assertion\n"; -} - -local ($^W) = 0; # Switch run-time warnings off during init. - -=head2 THREADS SUPPORT - -If we are running under a threaded Perl, we require threads and threads::shared -if the environment variable C is set, to enable proper -threaded debugger control. C<-dt> can also be used to set this. - -Each new thread will be announced and the debugger prompt will always inform -you of each new thread created. It will also indicate the thread id in which -we are currently running within the prompt like this: - - [tid] DB<$i> - -Where C<[tid]> is an integer thread id and C<$i> is the familiar debugger -command prompt. The prompt will show: C<[0]> when running under threads, but -not actually in a thread. C<[tid]> is consistent with C usage. - -While running under threads, when you set or delete a breakpoint (etc.), this -will apply to all threads, not just the currently running one. When you are -in a currently executing thread, you will stay there until it completes. With -the current implementation it is not currently possible to hop from one thread -to another. - -The C and C commands are currently fairly minimal - see C and C. - -Note that threading support was built into the debugger as of Perl version -C<5.8.6> and debugger version C<1.2.8>. - -=cut - -BEGIN { - # ensure we can share our non-threaded variables or no-op - if ($ENV{PERL5DB_THREADED}) { - require threads; - require threads::shared; - import threads::shared qw(share); - $DBGR; - share(\$DBGR); - lock($DBGR); - print "Threads support enabled\n"; - } else { - *lock = sub(*) {}; - *share = sub(*) {}; - } -} - -# This would probably be better done with "use vars", but that wasn't around -# when this code was originally written. (Neither was "use strict".) And on -# the principle of not fiddling with something that was working, this was -# left alone. -warn( # Do not ;-) - # These variables control the execution of 'dumpvar.pl'. 
- $dumpvar::hashDepth, - $dumpvar::arrayDepth, - $dumpvar::dumpDBFiles, - $dumpvar::dumpPackages, - $dumpvar::quoteHighBit, - $dumpvar::printUndef, - $dumpvar::globPrint, - $dumpvar::usageOnly, - - # used to save @ARGV and extract any debugger-related flags. - @ARGS, - - # used to control die() reporting in diesignal() - $Carp::CarpLevel, - - # used to prevent multiple entries to diesignal() - # (if for instance diesignal() itself dies) - $panic, - - # used to prevent the debugger from running nonstop - # after a restart - $second_time, - ) - if 0; - -foreach my $k (keys (%INC)) { - &share(\$main::{'_<'.$filename}); -}; - -# Command-line + PERLLIB: -# Save the contents of @INC before they are modified elsewhere. -@ini_INC = @INC; - -# This was an attempt to clear out the previous values of various -# trapped errors. Apparently it didn't help. XXX More info needed! -# $prevwarn = $prevdie = $prevbus = $prevsegv = ''; # Does not help?! - -# We set these variables to safe values. We don't want to blindly turn -# off warnings, because other packages may still want them. -$trace = $signal = $single = 0; # Uninitialized warning suppression - # (local $^W cannot help - other packages!). - -# Default to not exiting when program finishes; print the return -# value when the 'r' command is used to return from a subroutine. -$inhibit_exit = $option{PrintRet} = 1; - -=head1 OPTION PROCESSING - -The debugger's options are actually spread out over the debugger itself and -C; some of these are variables to be set, while others are -subs to be called with a value. To try to make this a little easier to -manage, the debugger uses a few data structures to define what options -are legal and how they are to be processed. - -First, the C<@options> array defines the I of all the options that -are to be accepted. - -=cut - -@options = qw( - CommandSet - hashDepth arrayDepth dumpDepth - DumpDBFiles DumpPackages DumpReused - compactDump veryCompact quote - HighBit undefPrint globPrint - PrintRet UsageOnly frame - AutoTrace TTY noTTY - ReadLine NonStop LineInfo - maxTraceLen recallCommand ShellBang - pager tkRunning ornaments - signalLevel warnLevel dieLevel - inhibit_exit ImmediateStop bareStringify - CreateTTY RemotePort windowSize - DollarCaretP OnlyAssertions WarnAssertions -); - -@RememberOnROptions = qw(DollarCaretP OnlyAssertions); - -=pod - -Second, C lists the variables that each option uses to save its -state. - -=cut - -%optionVars = ( - hashDepth => \$dumpvar::hashDepth, - arrayDepth => \$dumpvar::arrayDepth, - CommandSet => \$CommandSet, - DumpDBFiles => \$dumpvar::dumpDBFiles, - DumpPackages => \$dumpvar::dumpPackages, - DumpReused => \$dumpvar::dumpReused, - HighBit => \$dumpvar::quoteHighBit, - undefPrint => \$dumpvar::printUndef, - globPrint => \$dumpvar::globPrint, - UsageOnly => \$dumpvar::usageOnly, - CreateTTY => \$CreateTTY, - bareStringify => \$dumpvar::bareStringify, - frame => \$frame, - AutoTrace => \$trace, - inhibit_exit => \$inhibit_exit, - maxTraceLen => \$maxtrace, - ImmediateStop => \$ImmediateStop, - RemotePort => \$remoteport, - windowSize => \$window, - WarnAssertions => \$warnassertions, -); - -=pod - -Third, C<%optionAction> defines the subroutine to be called to process each -option. 
- -=cut - -%optionAction = ( - compactDump => \&dumpvar::compactDump, - veryCompact => \&dumpvar::veryCompact, - quote => \&dumpvar::quote, - TTY => \&TTY, - noTTY => \&noTTY, - ReadLine => \&ReadLine, - NonStop => \&NonStop, - LineInfo => \&LineInfo, - recallCommand => \&recallCommand, - ShellBang => \&shellBang, - pager => \&pager, - signalLevel => \&signalLevel, - warnLevel => \&warnLevel, - dieLevel => \&dieLevel, - tkRunning => \&tkRunning, - ornaments => \&ornaments, - RemotePort => \&RemotePort, - DollarCaretP => \&DollarCaretP, - OnlyAssertions=> \&OnlyAssertions, -); - -=pod - -Last, the C<%optionRequire> notes modules that must be Cd if an -option is used. - -=cut - -# Note that this list is not complete: several options not listed here -# actually require that dumpvar.pl be loaded for them to work, but are -# not in the table. A subsequent patch will correct this problem; for -# the moment, we're just recommenting, and we are NOT going to change -# function. -%optionRequire = ( - compactDump => 'dumpvar.pl', - veryCompact => 'dumpvar.pl', - quote => 'dumpvar.pl', -); - -=pod - -There are a number of initialization-related variables which can be set -by putting code to set them in a BEGIN block in the C environment -variable. These are: - -=over 4 - -=item C<$rl> - readline control XXX needs more explanation - -=item C<$warnLevel> - whether or not debugger takes over warning handling - -=item C<$dieLevel> - whether or not debugger takes over die handling - -=item C<$signalLevel> - whether or not debugger takes over signal handling - -=item C<$pre> - preprompt actions (array reference) - -=item C<$post> - postprompt actions (array reference) - -=item C<$pretype> - -=item C<$CreateTTY> - whether or not to create a new TTY for this debugger - -=item C<$CommandSet> - which command set to use (defaults to new, documented set) - -=back - -=cut - -# These guys may be defined in $ENV{PERL5DB} : -$rl = 1 unless defined $rl; -$warnLevel = 1 unless defined $warnLevel; -$dieLevel = 1 unless defined $dieLevel; -$signalLevel = 1 unless defined $signalLevel; -$pre = [] unless defined $pre; -$post = [] unless defined $post; -$pretype = [] unless defined $pretype; -$CreateTTY = 3 unless defined $CreateTTY; -$CommandSet = '580' unless defined $CommandSet; - -share($rl); -share($warnLevel); -share($dieLevel); -share($signalLevel); -share($pre); -share($post); -share($pretype); -share($rl); -share($CreateTTY); -share($CommandSet); - -=pod - -The default C, C, and C handlers are set up. - -=cut - -warnLevel($warnLevel); -dieLevel($dieLevel); -signalLevel($signalLevel); - -=pod - -The pager to be used is needed next. We try to get it from the -environment first. if it's not defined there, we try to find it in -the Perl C. If it's not there, we default to C. We -then call the C function to save the pager name. - -=cut - -# This routine makes sure $pager is set up so that '|' can use it. -pager( - - # If PAGER is defined in the environment, use it. - defined $ENV{PAGER} - ? $ENV{PAGER} - - # If not, see if Config.pm defines it. - : eval { require Config } - && defined $Config::Config{pager} - ? $Config::Config{pager} - - # If not, fall back to 'more'. - : 'more' - ) - unless defined $pager; - -=pod - -We set up the command to be used to access the man pages, the command -recall character (C unless otherwise defined) and the shell escape -character (C unless otherwise defined). Yes, these do conflict, and -neither works in the debugger at the moment. 
- -=cut - -setman(); - -# Set up defaults for command recall and shell escape (note: -# these currently don't work in linemode debugging). -&recallCommand("!") unless defined $prc; -&shellBang("!") unless defined $psh; - -=pod - -We then set up the gigantic string containing the debugger help. -We also set the limit on the number of arguments we'll display during a -trace. - -=cut - -sethelp(); - -# If we didn't get a default for the length of eval/stack trace args, -# set it here. -$maxtrace = 400 unless defined $maxtrace; - -=head2 SETTING UP THE DEBUGGER GREETING - -The debugger I helps to inform the user how many debuggers are -running, and whether the current debugger is the primary or a child. - -If we are the primary, we just hang onto our pid so we'll have it when -or if we start a child debugger. If we are a child, we'll set things up -so we'll have a unique greeting and so the parent will give us our own -TTY later. - -We save the current contents of the C environment variable -because we mess around with it. We'll also need to hang onto it because -we'll need it if we restart. - -Child debuggers make a label out of the current PID structure recorded in -PERLDB_PIDS plus the new PID. They also mark themselves as not having a TTY -yet so the parent will give them one later via C. - -=cut - -# Save the current contents of the environment; we're about to -# much with it. We'll need this if we have to restart. -$ini_pids = $ENV{PERLDB_PIDS}; - -if ( defined $ENV{PERLDB_PIDS} ) { - - # We're a child. Make us a label out of the current PID structure - # recorded in PERLDB_PIDS plus our (new) PID. Mark us as not having - # a term yet so the parent will give us one later via resetterm(). - $pids = "[$ENV{PERLDB_PIDS}]"; - $ENV{PERLDB_PIDS} .= "->$$"; - $term_pid = -1; -} ## end if (defined $ENV{PERLDB_PIDS... -else { - - # We're the parent PID. Initialize PERLDB_PID in case we end up with a - # child debugger, and mark us as the parent, so we'll know to set up - # more TTY's is we have to. - $ENV{PERLDB_PIDS} = "$$"; - $pids = "{pid=$$}"; - $term_pid = $$; -} - -$pidprompt = ''; - -# Sets up $emacs as a synonym for $slave_editor. -*emacs = $slave_editor if $slave_editor; # May be used in afterinit()... - -=head2 READING THE RC FILE - -The debugger will read a file of initialization options if supplied. If -running interactively, this is C<.perldb>; if not, it's C. - -=cut - -# As noted, this test really doesn't check accurately that the debugger -# is running at a terminal or not. - -if ( -e "/dev/tty" ) { # this is the wrong metric! - $rcfile = ".perldb"; -} -else { - $rcfile = "perldb.ini"; -} - -=pod - -The debugger does a safety test of the file to be read. It must be owned -either by the current user or root, and must only be writable by the owner. - -=cut - -# This wraps a safety test around "do" to read and evaluate the init file. -# -# This isn't really safe, because there's a race -# between checking and opening. The solution is to -# open and fstat the handle, but then you have to read and -# eval the contents. But then the silly thing gets -# your lexical scope, which is unfortunate at best. -sub safe_do { - my $file = shift; - - # Just exactly what part of the word "CORE::" don't you understand? - local $SIG{__WARN__}; - local $SIG{__DIE__}; - - unless ( is_safe_file($file) ) { - CORE::warn < command is invoked, it -tries to capture all of the state it can into environment variables, and -then sets C. 
When we start executing again, we check to see -if C is there; if so, we reload all the information that -the R command stuffed into the environment variables. - - PERLDB_RESTART - flag only, contains no restart data itself. - PERLDB_HIST - command history, if it's available - PERLDB_ON_LOAD - breakpoints set by the rc file - PERLDB_POSTPONE - subs that have been loaded/not executed, and have actions - PERLDB_VISITED - files that had breakpoints - PERLDB_FILE_... - breakpoints for a file - PERLDB_OPT - active options - PERLDB_INC - the original @INC - PERLDB_PRETYPE - preprompt debugger actions - PERLDB_PRE - preprompt Perl code - PERLDB_POST - post-prompt Perl code - PERLDB_TYPEAHEAD - typeahead captured by readline() - -We chug through all these variables and plug the values saved in them -back into the appropriate spots in the debugger. - -=cut - -if ( exists $ENV{PERLDB_RESTART} ) { - - # We're restarting, so we don't need the flag that says to restart anymore. - delete $ENV{PERLDB_RESTART}; - - # $restart = 1; - @hist = get_list('PERLDB_HIST'); - %break_on_load = get_list("PERLDB_ON_LOAD"); - %postponed = get_list("PERLDB_POSTPONE"); - - share(@hist); - share(@truehist); - share(%break_on_load); - share(%postponed); - - # restore breakpoints/actions - my @had_breakpoints = get_list("PERLDB_VISITED"); - for ( 0 .. $#had_breakpoints ) { - my %pf = get_list("PERLDB_FILE_$_"); - $postponed_file{ $had_breakpoints[$_] } = \%pf if %pf; - } - - # restore options - my %opt = get_list("PERLDB_OPT"); - my ( $opt, $val ); - while ( ( $opt, $val ) = each %opt ) { - $val =~ s/[\\\']/\\$1/g; - parse_options("$opt'$val'"); - } - - # restore original @INC - @INC = get_list("PERLDB_INC"); - @ini_INC = @INC; - - # return pre/postprompt actions and typeahead buffer - $pretype = [ get_list("PERLDB_PRETYPE") ]; - $pre = [ get_list("PERLDB_PRE") ]; - $post = [ get_list("PERLDB_POST") ]; - @typeahead = get_list( "PERLDB_TYPEAHEAD", @typeahead ); -} ## end if (exists $ENV{PERLDB_RESTART... - -=head2 SETTING UP THE TERMINAL - -Now, we'll decide how the debugger is going to interact with the user. -If there's no TTY, we set the debugger to run non-stop; there's not going -to be anyone there to enter commands. - -=cut - -if ($notty) { - $runnonstop = 1; - share($runnonstop); -} - -=pod - -If there is a TTY, we have to determine who it belongs to before we can -proceed. If this is a slave editor or graphical debugger (denoted by -the first command-line switch being '-emacs'), we shift this off and -set C<$rl> to 0 (XXX ostensibly to do straight reads). - -=cut - -else { - - # Is Perl being run from a slave editor or graphical debugger? - # If so, don't use readline, and set $slave_editor = 1. - $slave_editor = - ( ( defined $main::ARGV[0] ) and ( $main::ARGV[0] eq '-emacs' ) ); - $rl = 0, shift(@main::ARGV) if $slave_editor; - - #require Term::ReadLine; - -=pod - -We then determine what the console should be on various systems: - -=over 4 - -=item * Cygwin - We use C instead of a separate device. - -=cut - - if ( $^O eq 'cygwin' ) { - - # /dev/tty is binary. use stdin for textmode - undef $console; - } - -=item * Unix - use C. - -=cut - - elsif ( -e "/dev/tty" ) { - $console = "/dev/tty"; - } - -=item * Windows or MSDOS - use C. - -=cut - - elsif ( $^O eq 'dos' or -e "con" or $^O eq 'MSWin32' ) { - $console = "con"; - } - -=item * MacOS - use C if this is the MPW version; C if not. - -Note that Mac OS X returns C, not C. Also note that the debugger doesn't do anything special for C. Maybe it should. 
- -=cut - - elsif ( $^O eq 'MacOS' ) { - if ( $MacPerl::Version !~ /MPW/ ) { - $console = - "Dev:Console:Perl Debug"; # Separate window for application - } - else { - $console = "Dev:Console"; - } - } ## end elsif ($^O eq 'MacOS') - -=item * VMS - use C. - -=cut - - else { - - # everything else is ... - $console = "sys\$command"; - } - -=pod - -=back - -Several other systems don't use a specific console. We C -for those (Windows using a slave editor/graphical debugger, NetWare, OS/2 -with a slave editor, Epoc). - -=cut - - if ( ( $^O eq 'MSWin32' ) and ( $slave_editor or defined $ENV{EMACS} ) ) { - - # /dev/tty is binary. use stdin for textmode - $console = undef; - } - - if ( $^O eq 'NetWare' ) { - - # /dev/tty is binary. use stdin for textmode - $console = undef; - } - - # In OS/2, we need to use STDIN to get textmode too, even though - # it pretty much looks like Unix otherwise. - if ( defined $ENV{OS2_SHELL} and ( $slave_editor or $ENV{WINDOWID} ) ) - { # In OS/2 - $console = undef; - } - - # EPOC also falls into the 'got to use STDIN' camp. - if ( $^O eq 'epoc' ) { - $console = undef; - } - -=pod - -If there is a TTY hanging around from a parent, we use that as the console. - -=cut - - $console = $tty if defined $tty; - -=head2 SOCKET HANDLING - -The debugger is capable of opening a socket and carrying out a debugging -session over the socket. - -If C was defined in the options, the debugger assumes that it -should try to start a debugging session on that port. It builds the socket -and then tries to connect the input and output filehandles to it. - -=cut - - # Handle socket stuff. - - if ( defined $remoteport ) { - - # If RemotePort was defined in the options, connect input and output - # to the socket. - require IO::Socket; - $OUT = new IO::Socket::INET( - Timeout => '10', - PeerAddr => $remoteport, - Proto => 'tcp', - ); - if ( !$OUT ) { die "Unable to connect to remote host: $remoteport\n"; } - $IN = $OUT; - } ## end if (defined $remoteport) - -=pod - -If no C was defined, and we want to create a TTY on startup, -this is probably a situation where multiple debuggers are running (for example, -a backticked command that starts up another debugger). We create a new IN and -OUT filehandle, and do the necessary mojo to create a new TTY if we know how -and if we can. - -=cut - - # Non-socket. - else { - - # Two debuggers running (probably a system or a backtick that invokes - # the debugger itself under the running one). create a new IN and OUT - # filehandle, and do the necessary mojo to create a new tty if we - # know how, and we can. - create_IN_OUT(4) if $CreateTTY & 4; - if ($console) { - - # If we have a console, check to see if there are separate ins and - # outs to open. (They are assumed identiical if not.) - - my ( $i, $o ) = split /,/, $console; - $o = $i unless defined $o; - - # read/write on in, or just read, or read on STDIN. - open( IN, "+<$i" ) - || open( IN, "<$i" ) - || open( IN, "<&STDIN" ); - - # read/write/create/clobber out, or write/create/clobber out, - # or merge with STDERR, or merge with STDOUT. - open( OUT, "+>$o" ) - || open( OUT, ">$o" ) - || open( OUT, ">&STDERR" ) - || open( OUT, ">&STDOUT" ); # so we don't dongle stdout - - } ## end if ($console) - elsif ( not defined $console ) { - - # No console. Open STDIN. - open( IN, "<&STDIN" ); - - # merge with STDERR, or with STDOUT. 
- open( OUT, ">&STDERR" ) - || open( OUT, ">&STDOUT" ); # so we don't dongle stdout - $console = 'STDIN/OUT'; - } ## end elsif (not defined $console) - - # Keep copies of the filehandles so that when the pager runs, it - # can close standard input without clobbering ours. - $IN = \*IN, $OUT = \*OUT if $console or not defined $console; - } ## end elsif (from if(defined $remoteport)) - - # Unbuffer DB::OUT. We need to see responses right away. - my $previous = select($OUT); - $| = 1; # for DB::OUT - select($previous); - - # Line info goes to debugger output unless pointed elsewhere. - # Pointing elsewhere makes it possible for slave editors to - # keep track of file and position. We have both a filehandle - # and a I/O description to keep track of. - $LINEINFO = $OUT unless defined $LINEINFO; - $lineinfo = $console unless defined $lineinfo; - # share($LINEINFO); # <- unable to share globs - share($lineinfo); # - -=pod - -To finish initialization, we show the debugger greeting, -and then call the C subroutine if there is one. - -=cut - - # Show the debugger greeting. - $header =~ s/.Header: ([^,]+),v(\s+\S+\s+\S+).*$/$1$2/; - unless ($runnonstop) { - local $\ = ''; - local $, = ''; - if ( $term_pid eq '-1' ) { - print $OUT "\nDaughter DB session started...\n"; - } - else { - print $OUT "\nLoading DB routines from $header\n"; - print $OUT ( - "Editor support ", - $slave_editor ? "enabled" : "available", ".\n" - ); - print $OUT -"\nEnter h or `h h' for help, or `$doccmd perldebug' for more help.\n\n"; - } ## end else [ if ($term_pid eq '-1') - } ## end unless ($runnonstop) -} ## end else [ if ($notty) - -# XXX This looks like a bug to me. -# Why copy to @ARGS and then futz with @args? -@ARGS = @ARGV; -for (@args) { - # Make sure backslashes before single quotes are stripped out, and - # keep args unless they are numeric (XXX why?) - # s/\'/\\\'/g; # removed while not justified understandably - # s/(.*)/'$1'/ unless /^-?[\d.]+$/; # ditto -} - -# If there was an afterinit() sub defined, call it. It will get -# executed in our scope, so it can fiddle with debugger globals. -if ( defined &afterinit ) { # May be defined in $rcfile - &afterinit(); -} - -# Inform us about "Stack dump during die enabled ..." in dieLevel(). -$I_m_init = 1; - - diff --git a/tests/examplefiles/perl_regex-delims b/tests/examplefiles/perl_regex-delims deleted file mode 100644 index 6da5298d..00000000 --- a/tests/examplefiles/perl_regex-delims +++ /dev/null @@ -1,120 +0,0 @@ -#! 
/usr/bin/env perl - -use strict; -use warnings; - -# common delimiters -print "a: "; -my $a = "foo"; -print $a, " - "; -$a =~ s/foo/bar/; -print $a, "\n"; - -print "b: "; -my $b = "foo"; -print $b, " - "; -$b =~ s!foo!bar!; -print $b, "\n"; - -print "c: "; -my $c = "foo"; -print $c, " - "; -$c =~ s@foo@bar@; -print $c, "\n"; - -print "d: "; -my $d = "foo"; -print $d, " - "; -$d =~ s\foo\bar\; -print $d, "\n"; - -print "\n"; - -# balanced delimiters -print "e: "; -my $e = "foo"; -print $e, " - "; -$e =~ s{foo}{bar}; -print $e, "\n"; - -print "f: "; -my $f = "foo"; -print $f, " - "; -$f =~ s(foo)(bar); -print $f, "\n"; - -print "g: "; -my $g = "foo"; -print $g, " - "; -$g =~ s; -print $g, "\n"; - -print "h: "; -my $h = "foo"; -print $h, " - "; -$h =~ s[foo][bar]; -print $h, "\n"; - -print "\n"; - -# balanced delimiters with whitespace -print "i: "; -my $i = "foo"; -print $i, " - "; -$i =~ s{foo} {bar}; -print $i, "\n"; - -print "j: "; -my $j = "foo"; -print $j, " - "; -$j =~ s ; -print $j, "\n"; - -print "k: "; -my $k = "foo"; -print $k, " - "; -$k =~ - s(foo) - - (bar); -print $k, "\n"; - -print "\n"; - -# mixed delimiters -print "l: "; -my $l = "foo"; -print $l, " - "; -$l =~ s{foo} ; -print $l, "\n"; - -print "m: "; -my $m = "foo"; -print $m, " - "; -$m =~ s(foo) !bar!; -print $m, "\n"; - -print "n: "; -my $n = "foo"; -print $n, " - "; -$n =~ s[foo] $bar$; -print $n, "\n"; - -print "\n"; - -# /x modifier -print "o: "; -my $o = "foo"; -print $o, " - "; -$o =~ s{ - foo - } {bar}x; -print $o, "\n"; - -print "p: "; -my $p = "foo"; -print $p, " - "; -$p =~ s% - foo - %bar%x; -print $p, "\n"; diff --git a/tests/examplefiles/perlfunc.1 b/tests/examplefiles/perlfunc.1 deleted file mode 100644 index 5f80f0d0..00000000 --- a/tests/examplefiles/perlfunc.1 +++ /dev/null @@ -1,856 +0,0 @@ -.\" Automatically generated by Pod::Man v1.37, Pod::Parser v1.32 -.\" -.\" Standard preamble: -.\" ======================================================================== -.de Sh \" Subsection heading -.br -.if t .Sp -.ne 5 -.PP -\fB\\$1\fR -.PP -.. -.de Sp \" Vertical space (when we can't use .PP) -.if t .sp .5v -.if n .sp -.. -.de Vb \" Begin verbatim text -.ft CW -.nf -.ne \\$1 -.. -.de Ve \" End verbatim text -.ft R -.fi -.. -.\" Set up some character translations and predefined strings. \*(-- will -.\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left -.\" double quote, and \*(R" will give a right double quote. | will give a -.\" real vertical bar. \*(C+ will give a nicer C++. Capital omega is used to -.\" do unbreakable dashes and therefore won't be available. \*(C` and \*(C' -.\" expand to `' in nroff, nothing in troff, for use with C<>. -.tr \(*W-|\(bv\*(Tr -.ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p' -.ie n \{\ -. ds -- \(*W- -. ds PI pi -. if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch -. if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch -. ds L" "" -. ds R" "" -. ds C` "" -. ds C' "" -'br\} -.el\{\ -. ds -- \|\(em\| -. ds PI \(*p -. ds L" `` -. ds R" '' -'br\} -.\" -.\" If the F register is turned on, we'll generate index entries on stderr for -.\" titles (.TH), headers (.SH), subsections (.Sh), items (.Ip), and index -.\" entries marked with X<> in POD. Of course, you'll have to process the -.\" output yourself in some meaningful fashion. -.if \nF \{\ -. de IX -. tm Index:\\$1\t\\n%\t"\\$2" -.. -. nr % 0 -. rr F -.\} -.\" -.\" For nroff, turn off justification. 
Always turn off hyphenation; it makes -.\" way too many mistakes in technical documents. -.hy 0 -.if n .na -.\" -.\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2). -.\" Fear. Run. Save yourself. No user-serviceable parts. -. \" fudge factors for nroff and troff -.if n \{\ -. ds #H 0 -. ds #V .8m -. ds #F .3m -. ds #[ \f1 -. ds #] \fP -.\} -.if t \{\ -. ds #H ((1u-(\\\\n(.fu%2u))*.13m) -. ds #V .6m -. ds #F 0 -. ds #[ \& -. ds #] \& -.\} -. \" simple accents for nroff and troff -.if n \{\ -. ds ' \& -. ds ` \& -. ds ^ \& -. ds , \& -. ds ~ ~ -. ds / -.\} -.if t \{\ -. ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u" -. ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u' -. ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u' -. ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u' -. ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u' -. ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u' -.\} -. \" troff and (daisy-wheel) nroff accents -.ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V' -.ds 8 \h'\*(#H'\(*b\h'-\*(#H' -.ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#] -.ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H' -.ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u' -.ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#] -.ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#] -.ds ae a\h'-(\w'a'u*4/10)'e -.ds Ae A\h'-(\w'A'u*4/10)'E -. \" corrections for vroff -.if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u' -.if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u' -. \" for low resolution devices (crt and lpr) -.if \n(.H>23 .if \n(.V>19 \ -\{\ -. ds : e -. ds 8 ss -. ds o a -. ds d- d\h'-1'\(ga -. ds D- D\h'-1'\(hy -. ds th \o'bp' -. ds Th \o'LP' -. ds ae ae -. ds Ae AE -.\} -.rm #[ #] #H #V #F C -.\" ======================================================================== -.\" -.IX Title "PERLFUNC 1" -.TH PERLFUNC 1 "2006-01-07" "perl v5.8.8" "Perl Programmers Reference Guide" -.SH "NAME" -.IX Xref "function" -perlfunc \- Perl builtin functions -.SH "DESCRIPTION" -.IX Header "DESCRIPTION" -The functions in this section can serve as terms in an expression. -They fall into two major categories: list operators and named unary -operators. These differ in their precedence relationship with a -following comma. (See the precedence table in perlop.) List -operators take more than one argument, while unary operators can never -take more than one argument. Thus, a comma terminates the argument of -a unary operator, but merely separates the arguments of a list -operator. A unary operator generally provides a scalar context to its -argument, while a list operator may provide either scalar or list -contexts for its arguments. If it does both, the scalar arguments will -be first, and the list argument will follow. (Note that there can ever -be only one such list argument.) For instance, \fIsplice()\fR has three scalar -arguments followed by a list, whereas \fIgethostbyname()\fR has four scalar -arguments. -.PP -In the syntax descriptions that follow, list operators that expect a -list (and provide list context for the elements of the list) are shown -with \s-1LIST\s0 as an argument. Such a list may consist of any combination -of scalar arguments or list values; the list values will be included -in the list as if each individual element were interpolated at that -point in the list, forming a longer single-dimensional list value. 
-Commas should separate elements of the \s-1LIST\s0. -.PP -Any function in the list below may be used either with or without -parentheses around its arguments. (The syntax descriptions omit the -parentheses.) If you use the parentheses, the simple (but occasionally -surprising) rule is this: It \fIlooks\fR like a function, therefore it \fIis\fR a -function, and precedence doesn't matter. Otherwise it's a list -operator or unary operator, and precedence does matter. And whitespace -between the function and left parenthesis doesn't count\*(--so you need to -be careful sometimes: -.PP -.Vb 5 -\& print 1+2+4; # Prints 7. -\& print(1+2) + 4; # Prints 3. -\& print (1+2)+4; # Also prints 3! -\& print +(1+2)+4; # Prints 7. -\& print ((1+2)+4); # Prints 7. -.Ve -.PP -If you run Perl with the \fB\-w\fR switch it can warn you about this. For -example, the third line above produces: -.PP -.Vb 2 -\& print (...) interpreted as function at - line 1. -\& Useless use of integer addition in void context at - line 1. -.Ve -.PP -A few functions take no arguments at all, and therefore work as neither -unary nor list operators. These include such functions as \f(CW\*(C`time\*(C'\fR -and \f(CW\*(C`endpwent\*(C'\fR. For example, \f(CW\*(C`time+86_400\*(C'\fR always means -\&\f(CW\*(C`time() + 86_400\*(C'\fR. -.PP -For functions that can be used in either a scalar or list context, -nonabortive failure is generally indicated in a scalar context by -returning the undefined value, and in a list context by returning the -null list. -.PP -Remember the following important rule: There is \fBno rule\fR that relates -the behavior of an expression in list context to its behavior in scalar -context, or vice versa. It might do two totally different things. -Each operator and function decides which sort of value it would be most -appropriate to return in scalar context. Some operators return the -length of the list that would have been returned in list context. Some -operators return the first value in the list. Some operators return the -last value in the list. Some operators return a count of successful -operations. In general, they do what you want, unless you want -consistency. -.IX Xref "context" -.PP -A named array in scalar context is quite different from what would at -first glance appear to be a list in scalar context. You can't get a list -like \f(CW\*(C`(1,2,3)\*(C'\fR into being in scalar context, because the compiler knows -the context at compile time. It would generate the scalar comma operator -there, not the list construction version of the comma. That means it -was never a list to start with. -.PP -In general, functions in Perl that serve as wrappers for system calls -of the same name (like \fIchown\fR\|(2), \fIfork\fR\|(2), \fIclosedir\fR\|(2), etc.) all return -true when they succeed and \f(CW\*(C`undef\*(C'\fR otherwise, as is usually mentioned -in the descriptions below. This is different from the C interfaces, -which return \f(CW\*(C`\-1\*(C'\fR on failure. Exceptions to this rule are \f(CW\*(C`wait\*(C'\fR, -\&\f(CW\*(C`waitpid\*(C'\fR, and \f(CW\*(C`syscall\*(C'\fR. System calls also set the special \f(CW$!\fR -variable on failure. Other functions do not, except accidentally. -.Sh "Perl Functions by Category" -.IX Xref "function" -.IX Subsection "Perl Functions by Category" -Here are Perl's functions (including things that look like -functions, like some keywords and named operators) -arranged by category. Some functions appear in more -than one place. 
-.IP "Functions for SCALARs or strings" 4 -.IX Xref "scalar string character" -.IX Item "Functions for SCALARs or strings" -\&\f(CW\*(C`chomp\*(C'\fR, \f(CW\*(C`chop\*(C'\fR, \f(CW\*(C`chr\*(C'\fR, \f(CW\*(C`crypt\*(C'\fR, \f(CW\*(C`hex\*(C'\fR, \f(CW\*(C`index\*(C'\fR, \f(CW\*(C`lc\*(C'\fR, \f(CW\*(C`lcfirst\*(C'\fR, -\&\f(CW\*(C`length\*(C'\fR, \f(CW\*(C`oct\*(C'\fR, \f(CW\*(C`ord\*(C'\fR, \f(CW\*(C`pack\*(C'\fR, \f(CW\*(C`q/STRING/\*(C'\fR, \f(CW\*(C`qq/STRING/\*(C'\fR, \f(CW\*(C`reverse\*(C'\fR, -\&\f(CW\*(C`rindex\*(C'\fR, \f(CW\*(C`sprintf\*(C'\fR, \f(CW\*(C`substr\*(C'\fR, \f(CW\*(C`tr///\*(C'\fR, \f(CW\*(C`uc\*(C'\fR, \f(CW\*(C`ucfirst\*(C'\fR, \f(CW\*(C`y///\*(C'\fR -.IP "Regular expressions and pattern matching" 4 -.IX Xref "regular expression regex regexp" -.IX Item "Regular expressions and pattern matching" -\&\f(CW\*(C`m//\*(C'\fR, \f(CW\*(C`pos\*(C'\fR, \f(CW\*(C`quotemeta\*(C'\fR, \f(CW\*(C`s///\*(C'\fR, \f(CW\*(C`split\*(C'\fR, \f(CW\*(C`study\*(C'\fR, \f(CW\*(C`qr//\*(C'\fR -.IP "Numeric functions" 4 -.IX Xref "numeric number trigonometric trigonometry" -.IX Item "Numeric functions" -\&\f(CW\*(C`abs\*(C'\fR, \f(CW\*(C`atan2\*(C'\fR, \f(CW\*(C`cos\*(C'\fR, \f(CW\*(C`exp\*(C'\fR, \f(CW\*(C`hex\*(C'\fR, \f(CW\*(C`int\*(C'\fR, \f(CW\*(C`log\*(C'\fR, \f(CW\*(C`oct\*(C'\fR, \f(CW\*(C`rand\*(C'\fR, -\&\f(CW\*(C`sin\*(C'\fR, \f(CW\*(C`sqrt\*(C'\fR, \f(CW\*(C`srand\*(C'\fR -.ie n .IP "Functions for real @ARRAYs" 4 -.el .IP "Functions for real \f(CW@ARRAYs\fR" 4 -.IX Xref "array" -.IX Item "Functions for real @ARRAYs" -\&\f(CW\*(C`pop\*(C'\fR, \f(CW\*(C`push\*(C'\fR, \f(CW\*(C`shift\*(C'\fR, \f(CW\*(C`splice\*(C'\fR, \f(CW\*(C`unshift\*(C'\fR -.IP "Functions for list data" 4 -.IX Xref "list" -.IX Item "Functions for list data" -\&\f(CW\*(C`grep\*(C'\fR, \f(CW\*(C`join\*(C'\fR, \f(CW\*(C`map\*(C'\fR, \f(CW\*(C`qw/STRING/\*(C'\fR, \f(CW\*(C`reverse\*(C'\fR, \f(CW\*(C`sort\*(C'\fR, \f(CW\*(C`unpack\*(C'\fR -.ie n .IP "Functions for real %HASHes" 4 -.el .IP "Functions for real \f(CW%HASHes\fR" 4 -.IX Xref "hash" -.IX Item "Functions for real %HASHes" -\&\f(CW\*(C`delete\*(C'\fR, \f(CW\*(C`each\*(C'\fR, \f(CW\*(C`exists\*(C'\fR, \f(CW\*(C`keys\*(C'\fR, \f(CW\*(C`values\*(C'\fR -.IP "Input and output functions" 4 -.IX Xref "I O input output dbm" -.IX Item "Input and output functions" -\&\f(CW\*(C`binmode\*(C'\fR, \f(CW\*(C`close\*(C'\fR, \f(CW\*(C`closedir\*(C'\fR, \f(CW\*(C`dbmclose\*(C'\fR, \f(CW\*(C`dbmopen\*(C'\fR, \f(CW\*(C`die\*(C'\fR, \f(CW\*(C`eof\*(C'\fR, -\&\f(CW\*(C`fileno\*(C'\fR, \f(CW\*(C`flock\*(C'\fR, \f(CW\*(C`format\*(C'\fR, \f(CW\*(C`getc\*(C'\fR, \f(CW\*(C`print\*(C'\fR, \f(CW\*(C`printf\*(C'\fR, \f(CW\*(C`read\*(C'\fR, -\&\f(CW\*(C`readdir\*(C'\fR, \f(CW\*(C`rewinddir\*(C'\fR, \f(CW\*(C`seek\*(C'\fR, \f(CW\*(C`seekdir\*(C'\fR, \f(CW\*(C`select\*(C'\fR, \f(CW\*(C`syscall\*(C'\fR, -\&\f(CW\*(C`sysread\*(C'\fR, \f(CW\*(C`sysseek\*(C'\fR, \f(CW\*(C`syswrite\*(C'\fR, \f(CW\*(C`tell\*(C'\fR, \f(CW\*(C`telldir\*(C'\fR, \f(CW\*(C`truncate\*(C'\fR, -\&\f(CW\*(C`warn\*(C'\fR, \f(CW\*(C`write\*(C'\fR -.IP "Functions for fixed length data or records" 4 -.IX Item "Functions for fixed length data or records" -\&\f(CW\*(C`pack\*(C'\fR, \f(CW\*(C`read\*(C'\fR, \f(CW\*(C`syscall\*(C'\fR, \f(CW\*(C`sysread\*(C'\fR, \f(CW\*(C`syswrite\*(C'\fR, \f(CW\*(C`unpack\*(C'\fR, \f(CW\*(C`vec\*(C'\fR -.IP "Functions for filehandles, files, or directories" 4 -.IX Xref "file filehandle directory pipe link symlink" -.IX Item "Functions for filehandles, files, or directories" 
-\&\f(CW\*(C`\-\f(CIX\f(CW\*(C'\fR, \f(CW\*(C`chdir\*(C'\fR, \f(CW\*(C`chmod\*(C'\fR, \f(CW\*(C`chown\*(C'\fR, \f(CW\*(C`chroot\*(C'\fR, \f(CW\*(C`fcntl\*(C'\fR, \f(CW\*(C`glob\*(C'\fR, -\&\f(CW\*(C`ioctl\*(C'\fR, \f(CW\*(C`link\*(C'\fR, \f(CW\*(C`lstat\*(C'\fR, \f(CW\*(C`mkdir\*(C'\fR, \f(CW\*(C`open\*(C'\fR, \f(CW\*(C`opendir\*(C'\fR, -\&\f(CW\*(C`readlink\*(C'\fR, \f(CW\*(C`rename\*(C'\fR, \f(CW\*(C`rmdir\*(C'\fR, \f(CW\*(C`stat\*(C'\fR, \f(CW\*(C`symlink\*(C'\fR, \f(CW\*(C`sysopen\*(C'\fR, -\&\f(CW\*(C`umask\*(C'\fR, \f(CW\*(C`unlink\*(C'\fR, \f(CW\*(C`utime\*(C'\fR -.IP "Keywords related to the control flow of your Perl program" 4 -.IX Xref "control flow" -.IX Item "Keywords related to the control flow of your Perl program" -\&\f(CW\*(C`caller\*(C'\fR, \f(CW\*(C`continue\*(C'\fR, \f(CW\*(C`die\*(C'\fR, \f(CW\*(C`do\*(C'\fR, \f(CW\*(C`dump\*(C'\fR, \f(CW\*(C`eval\*(C'\fR, \f(CW\*(C`exit\*(C'\fR, -\&\f(CW\*(C`goto\*(C'\fR, \f(CW\*(C`last\*(C'\fR, \f(CW\*(C`next\*(C'\fR, \f(CW\*(C`redo\*(C'\fR, \f(CW\*(C`return\*(C'\fR, \f(CW\*(C`sub\*(C'\fR, \f(CW\*(C`wantarray\*(C'\fR -.IP "Keywords related to scoping" 4 -.IX Item "Keywords related to scoping" -\&\f(CW\*(C`caller\*(C'\fR, \f(CW\*(C`import\*(C'\fR, \f(CW\*(C`local\*(C'\fR, \f(CW\*(C`my\*(C'\fR, \f(CW\*(C`our\*(C'\fR, \f(CW\*(C`package\*(C'\fR, \f(CW\*(C`use\*(C'\fR -.IP "Miscellaneous functions" 4 -.IX Item "Miscellaneous functions" -\&\f(CW\*(C`defined\*(C'\fR, \f(CW\*(C`dump\*(C'\fR, \f(CW\*(C`eval\*(C'\fR, \f(CW\*(C`formline\*(C'\fR, \f(CW\*(C`local\*(C'\fR, \f(CW\*(C`my\*(C'\fR, \f(CW\*(C`our\*(C'\fR, \f(CW\*(C`reset\*(C'\fR, -\&\f(CW\*(C`scalar\*(C'\fR, \f(CW\*(C`undef\*(C'\fR, \f(CW\*(C`wantarray\*(C'\fR -.IP "Functions for processes and process groups" 4 -.IX Xref "process pid process id" -.IX Item "Functions for processes and process groups" -\&\f(CW\*(C`alarm\*(C'\fR, \f(CW\*(C`exec\*(C'\fR, \f(CW\*(C`fork\*(C'\fR, \f(CW\*(C`getpgrp\*(C'\fR, \f(CW\*(C`getppid\*(C'\fR, \f(CW\*(C`getpriority\*(C'\fR, \f(CW\*(C`kill\*(C'\fR, -\&\f(CW\*(C`pipe\*(C'\fR, \f(CW\*(C`qx/STRING/\*(C'\fR, \f(CW\*(C`setpgrp\*(C'\fR, \f(CW\*(C`setpriority\*(C'\fR, \f(CW\*(C`sleep\*(C'\fR, \f(CW\*(C`system\*(C'\fR, -\&\f(CW\*(C`times\*(C'\fR, \f(CW\*(C`wait\*(C'\fR, \f(CW\*(C`waitpid\*(C'\fR -.IP "Keywords related to perl modules" 4 -.IX Xref "module" -.IX Item "Keywords related to perl modules" -\&\f(CW\*(C`do\*(C'\fR, \f(CW\*(C`import\*(C'\fR, \f(CW\*(C`no\*(C'\fR, \f(CW\*(C`package\*(C'\fR, \f(CW\*(C`require\*(C'\fR, \f(CW\*(C`use\*(C'\fR -.IP "Keywords related to classes and object-orientedness" 4 -.IX Xref "object class package" -.IX Item "Keywords related to classes and object-orientedness" -\&\f(CW\*(C`bless\*(C'\fR, \f(CW\*(C`dbmclose\*(C'\fR, \f(CW\*(C`dbmopen\*(C'\fR, \f(CW\*(C`package\*(C'\fR, \f(CW\*(C`ref\*(C'\fR, \f(CW\*(C`tie\*(C'\fR, \f(CW\*(C`tied\*(C'\fR, -\&\f(CW\*(C`untie\*(C'\fR, \f(CW\*(C`use\*(C'\fR -.IP "Low-level socket functions" 4 -.IX Xref "socket sock" -.IX Item "Low-level socket functions" -\&\f(CW\*(C`accept\*(C'\fR, \f(CW\*(C`bind\*(C'\fR, \f(CW\*(C`connect\*(C'\fR, \f(CW\*(C`getpeername\*(C'\fR, \f(CW\*(C`getsockname\*(C'\fR, -\&\f(CW\*(C`getsockopt\*(C'\fR, \f(CW\*(C`listen\*(C'\fR, \f(CW\*(C`recv\*(C'\fR, \f(CW\*(C`send\*(C'\fR, \f(CW\*(C`setsockopt\*(C'\fR, \f(CW\*(C`shutdown\*(C'\fR, -\&\f(CW\*(C`socket\*(C'\fR, \f(CW\*(C`socketpair\*(C'\fR -.IP "System V interprocess communication functions" 4 -.IX Xref "IPC System V semaphore shared memory memory message" -.IX Item "System V interprocess communication functions" 
-\&\f(CW\*(C`msgctl\*(C'\fR, \f(CW\*(C`msgget\*(C'\fR, \f(CW\*(C`msgrcv\*(C'\fR, \f(CW\*(C`msgsnd\*(C'\fR, \f(CW\*(C`semctl\*(C'\fR, \f(CW\*(C`semget\*(C'\fR, \f(CW\*(C`semop\*(C'\fR, -\&\f(CW\*(C`shmctl\*(C'\fR, \f(CW\*(C`shmget\*(C'\fR, \f(CW\*(C`shmread\*(C'\fR, \f(CW\*(C`shmwrite\*(C'\fR -.IP "Fetching user and group info" 4 -.IX Xref "user group password uid gid passwd etc passwd" -.IX Item "Fetching user and group info" -\&\f(CW\*(C`endgrent\*(C'\fR, \f(CW\*(C`endhostent\*(C'\fR, \f(CW\*(C`endnetent\*(C'\fR, \f(CW\*(C`endpwent\*(C'\fR, \f(CW\*(C`getgrent\*(C'\fR, -\&\f(CW\*(C`getgrgid\*(C'\fR, \f(CW\*(C`getgrnam\*(C'\fR, \f(CW\*(C`getlogin\*(C'\fR, \f(CW\*(C`getpwent\*(C'\fR, \f(CW\*(C`getpwnam\*(C'\fR, -\&\f(CW\*(C`getpwuid\*(C'\fR, \f(CW\*(C`setgrent\*(C'\fR, \f(CW\*(C`setpwent\*(C'\fR -.IP "Fetching network info" 4 -.IX Xref "network protocol host hostname IP address service" -.IX Item "Fetching network info" -\&\f(CW\*(C`endprotoent\*(C'\fR, \f(CW\*(C`endservent\*(C'\fR, \f(CW\*(C`gethostbyaddr\*(C'\fR, \f(CW\*(C`gethostbyname\*(C'\fR, -\&\f(CW\*(C`gethostent\*(C'\fR, \f(CW\*(C`getnetbyaddr\*(C'\fR, \f(CW\*(C`getnetbyname\*(C'\fR, \f(CW\*(C`getnetent\*(C'\fR, -\&\f(CW\*(C`getprotobyname\*(C'\fR, \f(CW\*(C`getprotobynumber\*(C'\fR, \f(CW\*(C`getprotoent\*(C'\fR, -\&\f(CW\*(C`getservbyname\*(C'\fR, \f(CW\*(C`getservbyport\*(C'\fR, \f(CW\*(C`getservent\*(C'\fR, \f(CW\*(C`sethostent\*(C'\fR, -\&\f(CW\*(C`setnetent\*(C'\fR, \f(CW\*(C`setprotoent\*(C'\fR, \f(CW\*(C`setservent\*(C'\fR -.IP "Time-related functions" 4 -.IX Xref "time date" -.IX Item "Time-related functions" -\&\f(CW\*(C`gmtime\*(C'\fR, \f(CW\*(C`localtime\*(C'\fR, \f(CW\*(C`time\*(C'\fR, \f(CW\*(C`times\*(C'\fR -.IP "Functions new in perl5" 4 -.IX Xref "perl5" -.IX Item "Functions new in perl5" -\&\f(CW\*(C`abs\*(C'\fR, \f(CW\*(C`bless\*(C'\fR, \f(CW\*(C`chomp\*(C'\fR, \f(CW\*(C`chr\*(C'\fR, \f(CW\*(C`exists\*(C'\fR, \f(CW\*(C`formline\*(C'\fR, \f(CW\*(C`glob\*(C'\fR, -\&\f(CW\*(C`import\*(C'\fR, \f(CW\*(C`lc\*(C'\fR, \f(CW\*(C`lcfirst\*(C'\fR, \f(CW\*(C`map\*(C'\fR, \f(CW\*(C`my\*(C'\fR, \f(CW\*(C`no\*(C'\fR, \f(CW\*(C`our\*(C'\fR, \f(CW\*(C`prototype\*(C'\fR, -\&\f(CW\*(C`qx\*(C'\fR, \f(CW\*(C`qw\*(C'\fR, \f(CW\*(C`readline\*(C'\fR, \f(CW\*(C`readpipe\*(C'\fR, \f(CW\*(C`ref\*(C'\fR, \f(CW\*(C`sub*\*(C'\fR, \f(CW\*(C`sysopen\*(C'\fR, \f(CW\*(C`tie\*(C'\fR, -\&\f(CW\*(C`tied\*(C'\fR, \f(CW\*(C`uc\*(C'\fR, \f(CW\*(C`ucfirst\*(C'\fR, \f(CW\*(C`untie\*(C'\fR, \f(CW\*(C`use\*(C'\fR -.Sp -* \- \f(CW\*(C`sub\*(C'\fR was a keyword in perl4, but in perl5 it is an -operator, which can be used in expressions. -.IP "Functions obsoleted in perl5" 4 -.IX Item "Functions obsoleted in perl5" -\&\f(CW\*(C`dbmclose\*(C'\fR, \f(CW\*(C`dbmopen\*(C'\fR -.Sh "Portability" -.IX Xref "portability Unix portable" -.IX Subsection "Portability" -Perl was born in Unix and can therefore access all common Unix -system calls. In non-Unix environments, the functionality of some -Unix system calls may not be available, or details of the available -functionality may differ slightly. 
The Perl functions affected -by this are: -.PP -\&\f(CW\*(C`\-X\*(C'\fR, \f(CW\*(C`binmode\*(C'\fR, \f(CW\*(C`chmod\*(C'\fR, \f(CW\*(C`chown\*(C'\fR, \f(CW\*(C`chroot\*(C'\fR, \f(CW\*(C`crypt\*(C'\fR, -\&\f(CW\*(C`dbmclose\*(C'\fR, \f(CW\*(C`dbmopen\*(C'\fR, \f(CW\*(C`dump\*(C'\fR, \f(CW\*(C`endgrent\*(C'\fR, \f(CW\*(C`endhostent\*(C'\fR, -\&\f(CW\*(C`endnetent\*(C'\fR, \f(CW\*(C`endprotoent\*(C'\fR, \f(CW\*(C`endpwent\*(C'\fR, \f(CW\*(C`endservent\*(C'\fR, \f(CW\*(C`exec\*(C'\fR, -\&\f(CW\*(C`fcntl\*(C'\fR, \f(CW\*(C`flock\*(C'\fR, \f(CW\*(C`fork\*(C'\fR, \f(CW\*(C`getgrent\*(C'\fR, \f(CW\*(C`getgrgid\*(C'\fR, \f(CW\*(C`gethostbyname\*(C'\fR, -\&\f(CW\*(C`gethostent\*(C'\fR, \f(CW\*(C`getlogin\*(C'\fR, \f(CW\*(C`getnetbyaddr\*(C'\fR, \f(CW\*(C`getnetbyname\*(C'\fR, \f(CW\*(C`getnetent\*(C'\fR, -\&\f(CW\*(C`getppid\*(C'\fR, \f(CW\*(C`getpgrp\*(C'\fR, \f(CW\*(C`getpriority\*(C'\fR, \f(CW\*(C`getprotobynumber\*(C'\fR, -\&\f(CW\*(C`getprotoent\*(C'\fR, \f(CW\*(C`getpwent\*(C'\fR, \f(CW\*(C`getpwnam\*(C'\fR, \f(CW\*(C`getpwuid\*(C'\fR, -\&\f(CW\*(C`getservbyport\*(C'\fR, \f(CW\*(C`getservent\*(C'\fR, \f(CW\*(C`getsockopt\*(C'\fR, \f(CW\*(C`glob\*(C'\fR, \f(CW\*(C`ioctl\*(C'\fR, -\&\f(CW\*(C`kill\*(C'\fR, \f(CW\*(C`link\*(C'\fR, \f(CW\*(C`lstat\*(C'\fR, \f(CW\*(C`msgctl\*(C'\fR, \f(CW\*(C`msgget\*(C'\fR, \f(CW\*(C`msgrcv\*(C'\fR, -\&\f(CW\*(C`msgsnd\*(C'\fR, \f(CW\*(C`open\*(C'\fR, \f(CW\*(C`pipe\*(C'\fR, \f(CW\*(C`readlink\*(C'\fR, \f(CW\*(C`rename\*(C'\fR, \f(CW\*(C`select\*(C'\fR, \f(CW\*(C`semctl\*(C'\fR, -\&\f(CW\*(C`semget\*(C'\fR, \f(CW\*(C`semop\*(C'\fR, \f(CW\*(C`setgrent\*(C'\fR, \f(CW\*(C`sethostent\*(C'\fR, \f(CW\*(C`setnetent\*(C'\fR, -\&\f(CW\*(C`setpgrp\*(C'\fR, \f(CW\*(C`setpriority\*(C'\fR, \f(CW\*(C`setprotoent\*(C'\fR, \f(CW\*(C`setpwent\*(C'\fR, -\&\f(CW\*(C`setservent\*(C'\fR, \f(CW\*(C`setsockopt\*(C'\fR, \f(CW\*(C`shmctl\*(C'\fR, \f(CW\*(C`shmget\*(C'\fR, \f(CW\*(C`shmread\*(C'\fR, -\&\f(CW\*(C`shmwrite\*(C'\fR, \f(CW\*(C`socket\*(C'\fR, \f(CW\*(C`socketpair\*(C'\fR, -\&\f(CW\*(C`stat\*(C'\fR, \f(CW\*(C`symlink\*(C'\fR, \f(CW\*(C`syscall\*(C'\fR, \f(CW\*(C`sysopen\*(C'\fR, \f(CW\*(C`system\*(C'\fR, -\&\f(CW\*(C`times\*(C'\fR, \f(CW\*(C`truncate\*(C'\fR, \f(CW\*(C`umask\*(C'\fR, \f(CW\*(C`unlink\*(C'\fR, -\&\f(CW\*(C`utime\*(C'\fR, \f(CW\*(C`wait\*(C'\fR, \f(CW\*(C`waitpid\*(C'\fR -.PP -For more information about the portability of these functions, see -perlport and other available platform-specific documentation. -.Sh "Alphabetical Listing of Perl Functions" -.IX Subsection "Alphabetical Listing of Perl Functions" -.IP "\-X \s-1FILEHANDLE\s0" 8 -.IX Xref "-r -w -x -o -R -W -X -O -e -z -s -f -d -l -p -S -b -c -t -u -g -k -T -B -M -A -C" -.IX Item "-X FILEHANDLE" -.PD 0 -.IP "\-X \s-1EXPR\s0" 8 -.IX Item "-X EXPR" -.IP "\-X" 8 -.IX Item "-X" -.PD -A file test, where X is one of the letters listed below. This unary -operator takes one argument, either a filename or a filehandle, and -tests the associated file to see if something is true about it. If the -argument is omitted, tests \f(CW$_\fR, except for \f(CW\*(C`\-t\*(C'\fR, which tests \s-1STDIN\s0. -Unless otherwise documented, it returns \f(CW1\fR for true and \f(CW''\fR for false, or -the undefined value if the file doesn't exist. Despite the funny -names, precedence is the same as any other named unary operator, and -the argument may be parenthesized like any other unary operator. The -operator may be any of: -.Sp -.Vb 4 -\& -r File is readable by effective uid/gid. -\& -w File is writable by effective uid/gid. 
-\& -x File is executable by effective uid/gid. -\& -o File is owned by effective uid. -.Ve -.Sp -.Vb 4 -\& -R File is readable by real uid/gid. -\& -W File is writable by real uid/gid. -\& -X File is executable by real uid/gid. -\& -O File is owned by real uid. -.Ve -.Sp -.Vb 3 -\& -e File exists. -\& -z File has zero size (is empty). -\& -s File has nonzero size (returns size in bytes). -.Ve -.Sp -.Vb 8 -\& -f File is a plain file. -\& -d File is a directory. -\& -l File is a symbolic link. -\& -p File is a named pipe (FIFO), or Filehandle is a pipe. -\& -S File is a socket. -\& -b File is a block special file. -\& -c File is a character special file. -\& -t Filehandle is opened to a tty. -.Ve -.Sp -.Vb 3 -\& -u File has setuid bit set. -\& -g File has setgid bit set. -\& -k File has sticky bit set. -.Ve -.Sp -.Vb 2 -\& -T File is an ASCII text file (heuristic guess). -\& -B File is a "binary" file (opposite of -T). -.Ve -.Sp -.Vb 3 -\& -M Script start time minus file modification time, in days. -\& -A Same for access time. -\& -C Same for inode change time (Unix, may differ for other platforms) -.Ve -.Sp -Example: -.Sp -.Vb 5 -\& while (<>) { -\& chomp; -\& next unless -f $_; # ignore specials -\& #... -\& } -.Ve -.Sp -The interpretation of the file permission operators \f(CW\*(C`\-r\*(C'\fR, \f(CW\*(C`\-R\*(C'\fR, -\&\f(CW\*(C`\-w\*(C'\fR, \f(CW\*(C`\-W\*(C'\fR, \f(CW\*(C`\-x\*(C'\fR, and \f(CW\*(C`\-X\*(C'\fR is by default based solely on the mode -of the file and the uids and gids of the user. There may be other -reasons you can't actually read, write, or execute the file. Such -reasons may be for example network filesystem access controls, ACLs -(access control lists), read-only filesystems, and unrecognized -executable formats. -.Sp -Also note that, for the superuser on the local filesystems, the \f(CW\*(C`\-r\*(C'\fR, -\&\f(CW\*(C`\-R\*(C'\fR, \f(CW\*(C`\-w\*(C'\fR, and \f(CW\*(C`\-W\*(C'\fR tests always return 1, and \f(CW\*(C`\-x\*(C'\fR and \f(CW\*(C`\-X\*(C'\fR return 1 -if any execute bit is set in the mode. Scripts run by the superuser -may thus need to do a \fIstat()\fR to determine the actual mode of the file, -or temporarily set their effective uid to something else. -.Sp -If you are using ACLs, there is a pragma called \f(CW\*(C`filetest\*(C'\fR that may -produce more accurate results than the bare \fIstat()\fR mode bits. -When under the \f(CW\*(C`use filetest 'access'\*(C'\fR the above-mentioned filetests -will test whether the permission can (not) be granted using the -\&\fIaccess()\fR family of system calls. Also note that the \f(CW\*(C`\-x\*(C'\fR and \f(CW\*(C`\-X\*(C'\fR may -under this pragma return true even if there are no execute permission -bits set (nor any extra execute permission ACLs). This strangeness is -due to the underlying system calls' definitions. Read the -documentation for the \f(CW\*(C`filetest\*(C'\fR pragma for more information. -.Sp -Note that \f(CW\*(C`\-s/a/b/\*(C'\fR does not do a negated substitution. Saying -\&\f(CW\*(C`\-exp($foo)\*(C'\fR still works as expected, however\*(--only single letters -following a minus are interpreted as file tests. -.Sp -The \f(CW\*(C`\-T\*(C'\fR and \f(CW\*(C`\-B\*(C'\fR switches work as follows. The first block or so of the -file is examined for odd characters such as strange control codes or -characters with the high bit set. If too many strange characters (>30%) -are found, it's a \f(CW\*(C`\-B\*(C'\fR file; otherwise it's a \f(CW\*(C`\-T\*(C'\fR file. 
Also, any file -containing null in the first block is considered a binary file. If \f(CW\*(C`\-T\*(C'\fR -or \f(CW\*(C`\-B\*(C'\fR is used on a filehandle, the current \s-1IO\s0 buffer is examined -rather than the first block. Both \f(CW\*(C`\-T\*(C'\fR and \f(CW\*(C`\-B\*(C'\fR return true on a null -file, or a file at \s-1EOF\s0 when testing a filehandle. Because you have to -read a file to do the \f(CW\*(C`\-T\*(C'\fR test, on most occasions you want to use a \f(CW\*(C`\-f\*(C'\fR -against the file first, as in \f(CW\*(C`next unless \-f $file && \-T $file\*(C'\fR. -.Sp -If any of the file tests (or either the \f(CW\*(C`stat\*(C'\fR or \f(CW\*(C`lstat\*(C'\fR operators) are given -the special filehandle consisting of a solitary underline, then the stat -structure of the previous file test (or stat operator) is used, saving -a system call. (This doesn't work with \f(CW\*(C`\-t\*(C'\fR, and you need to remember -that \fIlstat()\fR and \f(CW\*(C`\-l\*(C'\fR will leave values in the stat structure for the -symbolic link, not the real file.) (Also, if the stat buffer was filled by -an \f(CW\*(C`lstat\*(C'\fR call, \f(CW\*(C`\-T\*(C'\fR and \f(CW\*(C`\-B\*(C'\fR will reset it with the results of \f(CW\*(C`stat _\*(C'\fR). -Example: -.Sp -.Vb 1 -\& print "Can do.\en" if -r $a || -w _ || -x _; -.Ve -.Sp -.Vb 9 -\& stat($filename); -\& print "Readable\en" if -r _; -\& print "Writable\en" if -w _; -\& print "Executable\en" if -x _; -\& print "Setuid\en" if -u _; -\& print "Setgid\en" if -g _; -\& print "Sticky\en" if -k _; -\& print "Text\en" if -T _; -\& print "Binary\en" if -B _; -.Ve -.IP "abs \s-1VALUE\s0" 8 -.IX Xref "abs absolute" -.IX Item "abs VALUE" -.PD 0 -.IP "abs" 8 -.IX Item "abs" -.PD -Returns the absolute value of its argument. -If \s-1VALUE\s0 is omitted, uses \f(CW$_\fR. -.IP "accept \s-1NEWSOCKET\s0,GENERICSOCKET" 8 -.IX Xref "accept" -.IX Item "accept NEWSOCKET,GENERICSOCKET" -Accepts an incoming socket connect, just as the \fIaccept\fR\|(2) system call -does. Returns the packed address if it succeeded, false otherwise. -See the example in \*(L"Sockets: Client/Server Communication\*(R" in perlipc. -.Sp -On systems that support a close-on-exec flag on files, the flag will -be set for the newly opened file descriptor, as determined by the -value of $^F. See \*(L"$^F\*(R" in perlvar. -.IP "alarm \s-1SECONDS\s0" 8 -.IX Xref "alarm SIGALRM timer" -.IX Item "alarm SECONDS" -.PD 0 -.IP "alarm" 8 -.IX Item "alarm" -.PD -Arranges to have a \s-1SIGALRM\s0 delivered to this process after the -specified number of wallclock seconds has elapsed. If \s-1SECONDS\s0 is not -specified, the value stored in \f(CW$_\fR is used. (On some machines, -unfortunately, the elapsed time may be up to one second less or more -than you specified because of how seconds are counted, and process -scheduling may delay the delivery of the signal even further.) -.Sp -Only one timer may be counting at once. Each call disables the -previous timer, and an argument of \f(CW0\fR may be supplied to cancel the -previous timer without starting a new one. The returned value is the -amount of time remaining on the previous timer. -.Sp -For delays of finer granularity than one second, you may use Perl's -four-argument version of \fIselect()\fR leaving the first three arguments -undefined, or you might be able to use the \f(CW\*(C`syscall\*(C'\fR interface to -access \fIsetitimer\fR\|(2) if your system supports it. 
The Time::HiRes -module (from \s-1CPAN\s0, and starting from Perl 5.8 part of the standard -distribution) may also prove useful. -.Sp -It is usually a mistake to intermix \f(CW\*(C`alarm\*(C'\fR and \f(CW\*(C`sleep\*(C'\fR calls. -(\f(CW\*(C`sleep\*(C'\fR may be internally implemented in your system with \f(CW\*(C`alarm\*(C'\fR) -.Sp -If you want to use \f(CW\*(C`alarm\*(C'\fR to time out a system call you need to use an -\&\f(CW\*(C`eval\*(C'\fR/\f(CW\*(C`die\*(C'\fR pair. You can't rely on the alarm causing the system call to -fail with \f(CW$!\fR set to \f(CW\*(C`EINTR\*(C'\fR because Perl sets up signal handlers to -restart system calls on some systems. Using \f(CW\*(C`eval\*(C'\fR/\f(CW\*(C`die\*(C'\fR always works, -modulo the caveats given in \*(L"Signals\*(R" in perlipc. -.Sp -.Vb 13 -\& eval { -\& local $SIG{ALRM} = sub { die "alarm\en" }; # NB: \en required -\& alarm $timeout; -\& $nread = sysread SOCKET, $buffer, $size; -\& alarm 0; -\& }; -\& if ($@) { -\& die unless $@ eq "alarm\en"; # propagate unexpected errors -\& # timed out -\& } -\& else { -\& # didn't -\& } -.Ve -.Sp -For more information see perlipc. -.IP "atan2 Y,X" 8 -.IX Xref "atan2 arctangent tan tangent" -.IX Item "atan2 Y,X" -Returns the arctangent of Y/X in the range \-PI to \s-1PI\s0. -.Sp -For the tangent operation, you may use the \f(CW\*(C`Math::Trig::tan\*(C'\fR -function, or use the familiar relation: -.Sp -.Vb 1 -\& sub tan { sin($_[0]) / cos($_[0]) } -.Ve -.Sp -Note that atan2(0, 0) is not well\-defined. -.IP "bind \s-1SOCKET\s0,NAME" 8 -.IX Xref "bind" -.IX Item "bind SOCKET,NAME" -Binds a network address to a socket, just as the bind system call -does. Returns true if it succeeded, false otherwise. \s-1NAME\s0 should be a -packed address of the appropriate type for the socket. See the examples in -\&\*(L"Sockets: Client/Server Communication\*(R" in perlipc. -.IP "binmode \s-1FILEHANDLE\s0, \s-1LAYER\s0" 8 -.IX Xref "binmode binary text DOS Windows" -.IX Item "binmode FILEHANDLE, LAYER" -.PD 0 -.IP "binmode \s-1FILEHANDLE\s0" 8 -.IX Item "binmode FILEHANDLE" -.PD -Arranges for \s-1FILEHANDLE\s0 to be read or written in \*(L"binary\*(R" or \*(L"text\*(R" -mode on systems where the run-time libraries distinguish between -binary and text files. If \s-1FILEHANDLE\s0 is an expression, the value is -taken as the name of the filehandle. Returns true on success, -otherwise it returns \f(CW\*(C`undef\*(C'\fR and sets \f(CW$!\fR (errno). -.Sp -On some systems (in general, \s-1DOS\s0 and Windows-based systems) \fIbinmode()\fR -is necessary when you're not working with a text file. For the sake -of portability it is a good idea to always use it when appropriate, -and to never use it when it isn't appropriate. Also, people can -set their I/O to be by default \s-1UTF\-8\s0 encoded Unicode, not bytes. -.Sp -In other words: regardless of platform, use \fIbinmode()\fR on binary data, -like for example images. -.Sp -If \s-1LAYER\s0 is present it is a single string, but may contain multiple -directives. The directives alter the behaviour of the file handle. -When \s-1LAYER\s0 is present using binmode on text file makes sense. -.Sp -If \s-1LAYER\s0 is omitted or specified as \f(CW\*(C`:raw\*(C'\fR the filehandle is made -suitable for passing binary data. This includes turning off possible \s-1CRLF\s0 -translation and marking it as bytes (as opposed to Unicode characters). 
-Note that, despite what may be implied in \fI\*(L"Programming Perl\*(R"\fR (the -Camel) or elsewhere, \f(CW\*(C`:raw\*(C'\fR is \fInot\fR the simply inverse of \f(CW\*(C`:crlf\*(C'\fR -\&\*(-- other layers which would affect binary nature of the stream are -\&\fIalso\fR disabled. See PerlIO, perlrun and the discussion about the -\&\s-1PERLIO\s0 environment variable. -.Sp -The \f(CW\*(C`:bytes\*(C'\fR, \f(CW\*(C`:crlf\*(C'\fR, and \f(CW\*(C`:utf8\*(C'\fR, and any other directives of the -form \f(CW\*(C`:...\*(C'\fR, are called I/O \fIlayers\fR. The \f(CW\*(C`open\*(C'\fR pragma can be used to -establish default I/O layers. See open. -.Sp -\&\fIThe \s-1LAYER\s0 parameter of the \fIbinmode()\fI function is described as \*(L"\s-1DISCIPLINE\s0\*(R" -in \*(L"Programming Perl, 3rd Edition\*(R". However, since the publishing of this -book, by many known as \*(L"Camel \s-1III\s0\*(R", the consensus of the naming of this -functionality has moved from \*(L"discipline\*(R" to \*(L"layer\*(R". All documentation -of this version of Perl therefore refers to \*(L"layers\*(R" rather than to -\&\*(L"disciplines\*(R". Now back to the regularly scheduled documentation...\fR -.Sp -To mark \s-1FILEHANDLE\s0 as \s-1UTF\-8\s0, use \f(CW\*(C`:utf8\*(C'\fR. -.Sp -In general, \fIbinmode()\fR should be called after \fIopen()\fR but before any I/O -is done on the filehandle. Calling \fIbinmode()\fR will normally flush any -pending buffered output data (and perhaps pending input data) on the -handle. An exception to this is the \f(CW\*(C`:encoding\*(C'\fR layer that -changes the default character encoding of the handle, see open. -The \f(CW\*(C`:encoding\*(C'\fR layer sometimes needs to be called in -mid\-stream, and it doesn't flush the stream. The \f(CW\*(C`:encoding\*(C'\fR -also implicitly pushes on top of itself the \f(CW\*(C`:utf8\*(C'\fR layer because -internally Perl will operate on \s-1UTF\-8\s0 encoded Unicode characters. -.Sp -The operating system, device drivers, C libraries, and Perl run-time -system all work together to let the programmer treat a single -character (\f(CW\*(C`\en\*(C'\fR) as the line terminator, irrespective of the external -representation. On many operating systems, the native text file -representation matches the internal representation, but on some -platforms the external representation of \f(CW\*(C`\en\*(C'\fR is made up of more than -one character. -.Sp -Mac \s-1OS\s0, all variants of Unix, and Stream_LF files on \s-1VMS\s0 use a single -character to end each line in the external representation of text (even -though that single character is \s-1CARRIAGE\s0 \s-1RETURN\s0 on Mac \s-1OS\s0 and \s-1LINE\s0 \s-1FEED\s0 -on Unix and most \s-1VMS\s0 files). In other systems like \s-1OS/2\s0, \s-1DOS\s0 and the -various flavors of MS-Windows your program sees a \f(CW\*(C`\en\*(C'\fR as a simple \f(CW\*(C`\ecJ\*(C'\fR, -but what's stored in text files are the two characters \f(CW\*(C`\ecM\ecJ\*(C'\fR. That -means that, if you don't use \fIbinmode()\fR on these systems, \f(CW\*(C`\ecM\ecJ\*(C'\fR -sequences on disk will be converted to \f(CW\*(C`\en\*(C'\fR on input, and any \f(CW\*(C`\en\*(C'\fR in -your program will be converted back to \f(CW\*(C`\ecM\ecJ\*(C'\fR on output. This is what -you want for text files, but it can be disastrous for binary files. -.Sp -Another consequence of using \fIbinmode()\fR (on some systems) is that -special end-of-file markers will be seen as part of the data stream. 
-For systems from the Microsoft family this means that if your binary -data contains \f(CW\*(C`\ecZ\*(C'\fR, the I/O subsystem will regard it as the end of -the file, unless you use \fIbinmode()\fR. -.Sp -\&\fIbinmode()\fR is not only important for \fIreadline()\fR and \fIprint()\fR operations, -but also when using \fIread()\fR, \fIseek()\fR, \fIsysread()\fR, \fIsyswrite()\fR and \fItell()\fR -(see perlport for more details). See the \f(CW$/\fR and \f(CW\*(C`$\e\*(C'\fR variables -in perlvar for how to manually set your input and output -line-termination sequences. -.IP "bless \s-1REF\s0,CLASSNAME" 8 -.IX Xref "bless" -.IX Item "bless REF,CLASSNAME" -.PD 0 -.IP "bless \s-1REF\s0" 8 -.IX Item "bless REF" -.PD -This function tells the thingy referenced by \s-1REF\s0 that it is now an object -in the \s-1CLASSNAME\s0 package. If \s-1CLASSNAME\s0 is omitted, the current package -is used. Because a \f(CW\*(C`bless\*(C'\fR is often the last thing in a constructor, -it returns the reference for convenience. Always use the two-argument -version if a derived class might inherit the function doing the blessing. -See perltoot and perlobj for more about the blessing (and blessings) -of objects. -.Sp -Consider always blessing objects in CLASSNAMEs that are mixed case. -Namespaces with all lowercase names are considered reserved for -Perl pragmata. Builtin types have all uppercase names. To prevent -confusion, you may wish to avoid such package names as well. Make sure -that \s-1CLASSNAME\s0 is a true value. -.Sp -See \*(L"Perl Modules\*(R" in perlmod. -.IP "caller \s-1EXPR\s0" 8 -.IX Xref "caller call stack stack stack trace" -.IX Item "caller EXPR" -.PD 0 -.IP "caller" 8 -.IX Item "caller" -.PD -Returns the context of the current subroutine call. In scalar context, -returns the caller's package name if there is a caller, that is, if -we're in a subroutine or \f(CW\*(C`eval\*(C'\fR or \f(CW\*(C`require\*(C'\fR, and the undefined value -otherwise. In list context, returns -.Sp -.Vb 1 -\& ($package, $filename, $line) = caller; -.Ve -.Sp -With \s-1EXPR\s0, it returns some extra information that the debugger uses to -print a stack trace. The value of \s-1EXPR\s0 indicates how many call frames -to go back before the current one. -.Sp -.Vb 2 -\& ($package, $filename, $line, $subroutine, $hasargs, -\& $wantarray, $evaltext, $is_require, $hints, $bitmask) = caller($i); -.Ve -.Sp -Here \f(CW$subroutine\fR may be \f(CW\*(C`(eval)\*(C'\fR if the frame is not a subroutine -call, but an \f(CW\*(C`eval\*(C'\fR. In such a case additional elements \f(CW$evaltext\fR and -\&\f(CW$is_require\fR are set: \f(CW$is_require\fR is true if the frame is created by a -\&\f(CW\*(C`require\*(C'\fR or \f(CW\*(C`use\*(C'\fR statement, \f(CW$evaltext\fR contains the text of the -\&\f(CW\*(C`eval EXPR\*(C'\fR statement. In particular, for an \f(CW\*(C`eval BLOCK\*(C'\fR statement, -\&\f(CW$filename\fR is \f(CW\*(C`(eval)\*(C'\fR, but \f(CW$evaltext\fR is undefined. (Note also that -each \f(CW\*(C`use\*(C'\fR statement creates a \f(CW\*(C`require\*(C'\fR frame inside an \f(CW\*(C`eval EXPR\*(C'\fR -frame.) \f(CW$subroutine\fR may also be \f(CW\*(C`(unknown)\*(C'\fR if this particular -subroutine happens to have been deleted from the symbol table. -\&\f(CW$hasargs\fR is true if a new instance of \f(CW@_\fR was set up for the frame. -\&\f(CW$hints\fR and \f(CW$bitmask\fR contain pragmatic hints that the caller was -compiled with. 
The \f(CW$hints\fR and \f(CW$bitmask\fR values are subject to change -between versions of Perl, and are not meant for external use. -.Sp -Furthermore, when called from within the \s-1DB\s0 package, caller returns more -detailed information: it sets the list variable \f(CW@DB::args\fR to be the -arguments with which the subroutine was invoked. -.Sp -Be aware that the optimizer might have optimized call frames away before -\&\f(CW\*(C`caller\*(C'\fR had a chance to get the information. That means that \f(CWcaller(N)\fR -might not return information about the call frame you expect it do, for -\&\f(CW\*(C`N > 1\*(C'\fR. In particular, \f(CW@DB::args\fR might have information from the -previous time \f(CW\*(C`caller\*(C'\fR was called. -.IP "chdir \s-1EXPR\s0" 8 -.IX Xref "chdir cd" -.IX Item "chdir EXPR" -.PD 0 -.IP "chdir \s-1FILEHANDLE\s0" 8 -.IX Item "chdir FILEHANDLE" -.IP "chdir \s-1DIRHANDLE\s0" 8 -.IX Item "chdir DIRHANDLE" -.IP "chdir" 8 -.IX Item "chdir" -.PD -Changes the working directory to \s-1EXPR\s0, if possible. If \s-1EXPR\s0 is omitted, -changes to the directory specified by \f(CW$ENV{HOME}\fR, if set; if not, -changes to the directory specified by \f(CW$ENV{LOGDIR}\fR. (Under \s-1VMS\s0, the -variable \f(CW$ENV{SYS$LOGIN}\fR is also checked, and used if it is set.) If -neither is set, \f(CW\*(C`chdir\*(C'\fR does nothing. It returns true upon success, -false otherwise. See the example under \f(CW\*(C`die\*(C'\fR. -.Sp -On systems that support fchdir, you might pass a file handle or -directory handle as argument. On systems that don't support fchdir, -passing handles produces a fatal error at run time. -.IP "chmod \s-1LIST\s0" 8 -.IX Xref "chmod permission mode" -.IX Item "chmod LIST" -Changes the permissions of a list of files. The first element of the -list must be the numerical mode, which should probably be an octal -number, and which definitely should \fInot\fR be a string of octal digits: -\&\f(CW0644\fR is okay, \f(CW'0644'\fR is not. Returns the number of files -successfully changed. See also \*(L"oct\*(R", if all you have is a string. -.Sp -.Vb 6 -\& $cnt = chmod 0755, 'foo', 'bar'; -\& chmod 0755, @executables; -\& $mode = '0644'; chmod $mode, 'foo'; # !!! sets mode to -\& # --w----r-T -\& $mode = '0644'; chmod oct($mode), 'foo'; # this is better -\& $mode = 0644; chmod $mode, 'foo'; # this is best -.Ve -.Sp -On systems that support fchmod, you might pass file handles among the -files. On systems that don't support fchmod, passing file handles -produces a fatal error at run time. -.Sp -.Vb 3 -\& open(my $fh, "<", "foo"); -\& my $perm = (stat $fh)[2] & 07777; -\& chmod($perm | 0600, $fh); -.Ve -.Sp -You can also import the symbolic \f(CW\*(C`S_I*\*(C'\fR constants from the Fcntl -module: -.Sp -.Vb 1 -\& use Fcntl ':mode'; -.Ve -.Sp -.Vb 2 -\& chmod S_IRWXU|S_IRGRP|S_IXGRP|S_IROTH|S_IXOTH, @executables; -\& # This is identical to the chmod 0755 of the above example. -.Ve -.IP "chomp \s-1VARIABLE\s0" 8 -.IX Xref "chomp INPUT_RECORD_SEPARATOR $ newline eol" -.IX Item "chomp VARIABLE" -.PD 0 -.IP "chomp( \s-1LIST\s0 )" 8 -.IX Item "chomp( LIST )" -.IP "chomp" 8 -.IX Item "chomp" -.PD -This safer version of \*(L"chop\*(R" removes any trailing string -that corresponds to the current value of \f(CW$/\fR (also known as -\&\f(CW$INPUT_RECORD_SEPARATOR\fR in the \f(CW\*(C`English\*(C'\fR module). It returns the total -number of characters removed from all its arguments. 
It's often used to -remove the newline from the end of an input record when you're worried -that the final record may be missing its newline. When in paragraph -mode (\f(CW\*(C`$/ = ""\*(C'\fR), it removes all trailing newlines from the string. -When in slurp mode (\f(CW\*(C`$/ = undef\*(C'\fR) or fixed-length record mode (\f(CW$/\fR is -a reference to an integer or the like, see perlvar) \fIchomp()\fR won't -remove anything. -If \s-1VARIABLE\s0 is omitted, it chomps \f(CW$_\fR. Example: -.Sp -.Vb 5 -\& while (<>) { -\& chomp; # avoid \en on last field -\& @array = split(/:/); -\& # ... -\& } -.Ve -.Sp -If \s-1VARIABLE\s0 is a hash, it chomps the hash's values, but not its keys. -.Sp - diff --git a/tests/examplefiles/phpMyAdmin.spec b/tests/examplefiles/phpMyAdmin.spec deleted file mode 100644 index 120fbc92..00000000 --- a/tests/examplefiles/phpMyAdmin.spec +++ /dev/null @@ -1,163 +0,0 @@ -%define _myadminpath /var/www/myadmin -%define pkgrelease rc1 -%define microrelease 1 - -Name: phpMyAdmin -Version: 3.1.1 -Release: %{pkgrelease}.%{microrelease} -License: GPL -Group: Applications/Databases/Interfaces -Source0: http://prdownloads.sourceforge.net/phpmyadmin/%{name}-%{version}-%{pkgrelease}.tar.bz2 -Source1: phpMyAdmin-http.conf -URL: http://sourceforge.net/projects/phpmyadmin/ -Requires: mysql -Requires: php-mysql -Buildarch: noarch -#BuildRoot: %{_tmppath}/%{name}-root - -Summary: phpMyAdmin - web-based MySQL administration - -%description -phpMyAdmin can manage a whole MySQL-server (needs a super-user) but -also a single database. To accomplish the latter you'll need a -properly set up MySQL-user which can read/write only the desired -database. It's up to you to look up the appropiate part in the MySQL -manual. Currently phpMyAdmin can: - - create and drop databases - - create, copy, drop and alter tables - - delete, edit and add fields - - execute any SQL-statement, even batch-queries - - manage keys on fields - - load text files into tables - - create (*) and read dumps of tables - - export (*) and import data to CSV values - - administer multiple servers and single databases - - check referencial integrity - - create complex queries automatically connecting required tables - - create PDF graphics of your database layout - - communicate in more than 38 different languages - - -%prep -%setup -q -n %{name}-%{version}-%{pkgrelease} - - -%build - - -%install -[ "${RPM_BUILD_ROOT}" != "/" ] && [ -d "${RPM_BUILD_ROOT}" ] && \ - rm -rf "${RPM_BUILD_ROOT}" - -# Create directories. - -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/{css,js,lang,libraries,themes} -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/libraries/{auth,dbg,dbi,engines} -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/libraries/{export,tcpdf,import} -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/libraries/transformations -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/libraries/tcpdf/font -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/themes/{darkblue_orange,original} -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/themes/darkblue_orange/{css,img} -install -d "${RPM_BUILD_ROOT}%{_myadminpath}"/themes/original/{css,img} - -# Install files. 
- -install libraries/config.default.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}"/config.inc.php -install *.{php,ico} "${RPM_BUILD_ROOT}%{_myadminpath}"/ -install ChangeLog LICENSE README "${RPM_BUILD_ROOT}%{_myadminpath}"/ -install Documentation.html docs.css "${RPM_BUILD_ROOT}%{_myadminpath}"/ -install css/* "${RPM_BUILD_ROOT}%{_myadminpath}/css"/ -install js/* "${RPM_BUILD_ROOT}%{_myadminpath}/js"/ -install lang/*.php "${RPM_BUILD_ROOT}%{_myadminpath}/lang"/ -install libraries/*.php "${RPM_BUILD_ROOT}%{_myadminpath}/libraries"/ -install libraries/auth/*.php "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/auth"/ -install libraries/dbg/*.php "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/dbg"/ -install libraries/dbi/*.php "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/dbi"/ -install libraries/engines/*.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/engines"/ -install libraries/export/*.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/export"/ -install libraries/tcpdf/*.php "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/tcpdf"/ -install libraries/tcpdf/font/*.{php,z} \ - "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/tcpdf/font"/ -install libraries/import/*.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/import"/ -install libraries/transformations/*.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}/libraries/transformations"/ -install themes/darkblue_orange/*.{php,png} \ - "${RPM_BUILD_ROOT}%{_myadminpath}/themes/darkblue_orange"/ -install themes/darkblue_orange/css/*.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}/themes/darkblue_orange/css"/ -install themes/darkblue_orange/img/*.{png,ico} \ - "${RPM_BUILD_ROOT}%{_myadminpath}/themes/darkblue_orange/img"/ -install themes/original/*.{php,png} \ - "${RPM_BUILD_ROOT}%{_myadminpath}/themes/original"/ -install themes/original/css/*.php \ - "${RPM_BUILD_ROOT}%{_myadminpath}/themes/original/css"/ -install themes/original/img/*.{png,ico} \ - "${RPM_BUILD_ROOT}%{_myadminpath}/themes/original/img"/ - -# Create documentation directories. - -DOCROOT="${RPM_BUILD_ROOT}%{_docdir}/%{name}-%{version}" -install -d "${DOCROOT}" -install -d "${DOCROOT}"/{lang,scripts,transformations} - -# Install documentation files. - -install RELEASE-DATE-* "${DOCROOT}"/ -install CREDITS ChangeLog INSTALL LICENSE "${DOCROOT}"/ -install README TODO "${DOCROOT}"/ -install Documentation.* docs.css "${DOCROOT}"/ -install translators.html "${DOCROOT}"/ -install lang/*.sh "${DOCROOT}"/lang/ -install scripts/* "${DOCROOT}"/scripts/ -install libraries/tcpdf/README "${DOCROOT}"/README.tcpdf -install libraries/import/README "${DOCROOT}"/README.import -install libraries/transformations/README "${DOCROOT}"/transformations/ -install libraries/transformations/TEMPLATE* "${DOCROOT}"/transformations/ -install libraries/transformations/*.sh "${DOCROOT}"/transformations/ - -# Install configuration file for Apache. - -install -d "${RPM_BUILD_ROOT}%{_sysconfdir}/httpd/conf.d" -install "%{SOURCE1}" \ - "${RPM_BUILD_ROOT}%{_sysconfdir}/httpd/conf.d/phpMyAdmin.conf" - -# Generate non-configuration file list. 
- -(cd "${RPM_BUILD_ROOT}"; ls -d ."%{_myadminpath}"/*) | - sed -e '/\/config\.inc\.php$/d' -e 's/^.//' > files.list - - - -%clean -[ "${RPM_BUILD_ROOT}" != "/" ] && [ -d "${RPM_BUILD_ROOT}" ] && \ - rm -rf "${RPM_BUILD_ROOT}" - - -%files -f files.list -%defattr(644, root, root, 755) -%doc %{_docdir}/%{name}-%{version} -%dir %{_myadminpath} -%attr(640,root,apache) %config(noreplace) %verify(not size mtime md5) %{_myadminpath}/config.inc.php -%config(noreplace) %verify(not size mtime md5) %{_sysconfdir}/httpd/conf.d/* - - -%changelog -* Thu Feb 23 2006 Patrick Monnerat -- Version 2.8.0-rc1.1. - -* Thu Dec 22 2005 Patrick Monnerat -- Path "nullpw" to allow trying connection with null password after failure. -- Version 2.7.0-pl1.1. - -* Mon Aug 22 2005 Patrick Monnerat -- Version 2.6.3-pl1. - -* Wed Jul 21 2004 Patrick Monnerat -- Version 2.5.7-pl1. - -* Fri Nov 22 2002 Patrick Monnerat -- Version 2.3.0-rc1. diff --git a/tests/examplefiles/phpcomplete.vim b/tests/examplefiles/phpcomplete.vim deleted file mode 100644 index 17d74fd8..00000000 --- a/tests/examplefiles/phpcomplete.vim +++ /dev/null @@ -1,567 +0,0 @@ -" Vim completion script -" Language: PHP -" Maintainer: Mikolaj Machowski ( mikmach AT wp DOT pl ) -" Last Change: 2006 May 9 -" -" TODO: -" - Class aware completion: -" a) caching? -" - Switching to HTML (XML?) completion (SQL) inside of phpStrings -" - allow also for XML completion <- better do html_flavor for HTML -" completion -" - outside of getting parent tag may cause problems. Heh, even in -" perfect conditions GetLastOpenTag doesn't cooperate... Inside of -" phpStrings this can be even a bonus but outside of it is not the -" best situation - -function! phpcomplete#CompletePHP(findstart, base) - if a:findstart - unlet! b:php_menu - " Check if we are inside of PHP markup - let pos = getpos('.') - let phpbegin = searchpairpos('', 'bWn', - \ 'synIDattr(synID(line("."), col("."), 0), "name") =~? "string\|comment"') - let phpend = searchpairpos('', 'Wn', - \ 'synIDattr(synID(line("."), col("."), 0), "name") =~? "string\|comment"') - - if phpbegin == [0,0] && phpend == [0,0] - " We are outside of any PHP markup. Complete HTML - let htmlbegin = htmlcomplete#CompleteTags(1, '') - let cursor_col = pos[2] - let base = getline('.')[htmlbegin : cursor_col] - let b:php_menu = htmlcomplete#CompleteTags(0, base) - return htmlbegin - else - " locate the start of the word - let line = getline('.') - let start = col('.') - 1 - let curline = line('.') - let compl_begin = col('.') - 2 - while start >= 0 && line[start - 1] =~ '[a-zA-Z_0-9\x7f-\xff$]' - let start -= 1 - endwhile - let b:compl_context = getline('.')[0:compl_begin] - return start - - " We can be also inside of phpString with HTML tags. Deal with - " it later (time, not lines). - endif - - endif - " If exists b:php_menu it means completion was already constructed we - " don't need to do anything more - if exists("b:php_menu") - return b:php_menu - endif - " Initialize base return lists - let res = [] - let res2 = [] - " a:base is very short - we need context - if exists("b:compl_context") - let context = b:compl_context - unlet! b:compl_context - endif - - if !exists('g:php_builtin_functions') - call phpcomplete#LoadData() - endif - - let scontext = substitute(context, '\$\?[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*$', '', '') - - if scontext =~ '\(=\s*new\|extends\)\s\+$' - " Complete class name - " Internal solution for finding classes in current file. 
- let file = getline(1, '$') - call filter(file, - \ 'v:val =~ "class\\s\\+[a-zA-Z_\\x7f-\\xff][a-zA-Z_0-9\\x7f-\\xff]*\\s*("') - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - let jfile = join(file, ' ') - let int_values = split(jfile, 'class\s\+') - let int_classes = {} - for i in int_values - let c_name = matchstr(i, '^[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*') - if c_name != '' - let int_classes[c_name] = '' - endif - endfor - - " Prepare list of classes from tags file - let ext_classes = {} - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - if fnames != '' - exe 'silent! vimgrep /^'.a:base.'.*\tc\(\t\|$\)/j '.fnames - let qflist = getqflist() - if len(qflist) > 0 - for field in qflist - " [:space:] thing: we don't have to be so strict when - " dealing with tags files - entries there were already - " checked by ctags. - let item = matchstr(field['text'], '^[^[:space:]]\+') - let ext_classes[item] = '' - endfor - endif - endif - - " Prepare list of built in classes from g:php_builtin_functions - if !exists("g:php_omni_bi_classes") - let g:php_omni_bi_classes = {} - for i in keys(g:php_builtin_object_functions) - let g:php_omni_bi_classes[substitute(i, '::.*$', '', '')] = '' - endfor - endif - - let classes = sort(keys(int_classes)) - let classes += sort(keys(ext_classes)) - let classes += sort(keys(g:php_omni_bi_classes)) - - for m in classes - if m =~ '^'.a:base - call add(res, m) - endif - endfor - - let final_menu = [] - for i in res - let final_menu += [{'word':i, 'kind':'c'}] - endfor - - return final_menu - - elseif scontext =~ '\(->\|::\)$' - " Complete user functions and variables - " Internal solution for current file. - " That seems as unnecessary repeating of functions but there are - " few not so subtle differences as not appending of $ and addition - " of 'kind' tag (not necessary in regular completion) - - if scontext =~ '->$' && scontext !~ '\$this->$' - - " Get name of the class - let classname = phpcomplete#GetClassName(scontext) - - " Get location of class definition, we have to iterate through all - " tags files separately because we need relative path from current - " file to the exact file (tags file can be in different dir) - if classname != '' - let classlocation = phpcomplete#GetClassLocation(classname) - else - let classlocation = '' - endif - - if classlocation == 'VIMPHP_BUILTINOBJECT' - - for object in keys(g:php_builtin_object_functions) - if object =~ '^'.classname - let res += [{'word':substitute(object, '.*::', '', ''), - \ 'info': g:php_builtin_object_functions[object]}] - endif - endfor - - return res - - endif - - if filereadable(classlocation) - let classfile = readfile(classlocation) - let classcontent = '' - let classcontent .= "\n".phpcomplete#GetClassContents(classfile, classname) - let sccontent = split(classcontent, "\n") - - " YES, YES, YES! - we have whole content including extends! - " Now we need to get two elements: public functions and public - " vars - " NO, NO, NO! - third separate filtering looking for content - " :(, but all of them have differences. To squeeze them into - " one implementation would require many additional arguments - " and ifs. 
No good solution - " Functions declared with public keyword or without any - " keyword are public - let functions = filter(deepcopy(sccontent), - \ 'v:val =~ "^\\s*\\(static\\s\\+\\|public\\s\\+\\)*function"') - let jfuncs = join(functions, ' ') - let sfuncs = split(jfuncs, 'function\s\+') - let c_functions = {} - for i in sfuncs - let f_name = matchstr(i, - \ '^&\?\zs[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\ze') - let f_args = matchstr(i, - \ '^&\?[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\s*(\zs.\{-}\ze)\_s*{') - if f_name != '' - let c_functions[f_name.'('] = f_args - endif - endfor - " Variables declared with var or with public keyword are - " public - let variables = filter(deepcopy(sccontent), - \ 'v:val =~ "^\\s*\\(public\\|var\\)\\s\\+\\$"') - let jvars = join(variables, ' ') - let svars = split(jvars, '\$') - let c_variables = {} - for i in svars - let c_var = matchstr(i, - \ '^\zs[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\ze') - if c_var != '' - let c_variables[c_var] = '' - endif - endfor - - let all_values = {} - call extend(all_values, c_functions) - call extend(all_values, c_variables) - - for m in sort(keys(all_values)) - if m =~ '^'.a:base && m !~ '::' - call add(res, m) - elseif m =~ '::'.a:base - call add(res2, m) - endif - endfor - - let start_list = res + res2 - - let final_list = [] - for i in start_list - if has_key(c_variables, i) - let class = ' ' - if all_values[i] != '' - let class = i.' class ' - endif - let final_list += - \ [{'word':i, - \ 'info':class.all_values[i], - \ 'kind':'v'}] - else - let final_list += - \ [{'word':substitute(i, '.*::', '', ''), - \ 'info':i.all_values[i].')', - \ 'kind':'f'}] - endif - endfor - - return final_list - - endif - - endif - - if a:base =~ '^\$' - let adddollar = '$' - else - let adddollar = '' - endif - let file = getline(1, '$') - let jfile = join(file, ' ') - let sfile = split(jfile, '\$') - let int_vars = {} - for i in sfile - if i =~ '^\$[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\s*=\s*new' - let val = matchstr(i, '^[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*').'->' - else - let val = matchstr(i, '^[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*') - endif - if val !~ '' - let int_vars[adddollar.val] = '' - endif - endfor - - " ctags has good support for PHP, use tags file for external - " variables - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - let ext_vars = {} - if fnames != '' - let sbase = substitute(a:base, '^\$', '', '') - exe 'silent! vimgrep /^'.sbase.'.*\tv\(\t\|$\)/j '.fnames - let qflist = getqflist() - if len(qflist) > 0 - for field in qflist - let item = matchstr(field['text'], '^[^[:space:]]\+') - " Add -> if it is possible object declaration - let classname = '' - if field['text'] =~ item.'\s*=\s*new\s\+' - let item = item.'->' - let classname = matchstr(field['text'], - \ '=\s*new\s\+\zs[a-zA-Z_0-9\x7f-\xff]\+\ze') - endif - let ext_vars[adddollar.item] = classname - endfor - endif - endif - - " Now we have all variables in int_vars dictionary - call extend(int_vars, ext_vars) - - " Internal solution for finding functions in current file. 
- let file = getline(1, '$') - call filter(file, - \ 'v:val =~ "function\\s\\+&\\?[a-zA-Z_\\x7f-\\xff][a-zA-Z_0-9\\x7f-\\xff]*\\s*("') - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - let jfile = join(file, ' ') - let int_values = split(jfile, 'function\s\+') - let int_functions = {} - for i in int_values - let f_name = matchstr(i, - \ '^&\?\zs[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\ze') - let f_args = matchstr(i, - \ '^&\?[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\s*(\zs.\{-}\ze)\_s*{') - let int_functions[f_name.'('] = f_args.')' - endfor - - " Prepare list of functions from tags file - let ext_functions = {} - if fnames != '' - exe 'silent! vimgrep /^'.a:base.'.*\tf\(\t\|$\)/j '.fnames - let qflist = getqflist() - if len(qflist) > 0 - for field in qflist - " File name - let item = matchstr(field['text'], '^[^[:space:]]\+') - let fname = matchstr(field['text'], '\t\zs\f\+\ze') - let prototype = matchstr(field['text'], - \ 'function\s\+&\?[^[:space:]]\+\s*(\s*\zs.\{-}\ze\s*)\s*{\?') - let ext_functions[item.'('] = prototype.') - '.fname - endfor - endif - endif - - let all_values = {} - call extend(all_values, int_functions) - call extend(all_values, ext_functions) - call extend(all_values, int_vars) " external variables are already in - call extend(all_values, g:php_builtin_object_functions) - - for m in sort(keys(all_values)) - if m =~ '\(^\|::\)'.a:base - call add(res, m) - endif - endfor - - let start_list = res - - let final_list = [] - for i in start_list - if has_key(int_vars, i) - let class = ' ' - if all_values[i] != '' - let class = i.' class ' - endif - let final_list += [{'word':i, 'info':class.all_values[i], 'kind':'v'}] - else - let final_list += - \ [{'word':substitute(i, '.*::', '', ''), - \ 'info':i.all_values[i], - \ 'kind':'f'}] - endif - endfor - - return final_list - endif - - if a:base =~ '^\$' - " Complete variables - " Built-in variables {{{ - let g:php_builtin_vars = {'$GLOBALS':'', - \ '$_SERVER':'', - \ '$_GET':'', - \ '$_POST':'', - \ '$_COOKIE':'', - \ '$_FILES':'', - \ '$_ENV':'', - \ '$_REQUEST':'', - \ '$_SESSION':'', - \ '$HTTP_SERVER_VARS':'', - \ '$HTTP_ENV_VARS':'', - \ '$HTTP_COOKIE_VARS':'', - \ '$HTTP_GET_VARS':'', - \ '$HTTP_POST_VARS':'', - \ '$HTTP_POST_FILES':'', - \ '$HTTP_SESSION_VARS':'', - \ '$php_errormsg':'', - \ '$this':'' - \ } - " }}} - - " Internal solution for current file. - let file = getline(1, '$') - let jfile = join(file, ' ') - let int_vals = split(jfile, '\ze\$') - let int_vars = {} - for i in int_vals - if i =~ '^\$[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\s*=\s*new' - let val = matchstr(i, - \ '^\$[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*').'->' - else - let val = matchstr(i, - \ '^\$[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*') - endif - if val != '' - let int_vars[val] = '' - endif - endfor - - call extend(int_vars,g:php_builtin_vars) - - " ctags has support for PHP, use tags file for external variables - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - let ext_vars = {} - if fnames != '' - let sbase = substitute(a:base, '^\$', '', '') - exe 'silent! 
vimgrep /^'.sbase.'.*\tv\(\t\|$\)/j '.fnames - let qflist = getqflist() - if len(qflist) > 0 - for field in qflist - let item = '$'.matchstr(field['text'], '^[^[:space:]]\+') - let m_menu = '' - " Add -> if it is possible object declaration - if field['text'] =~ item.'\s*=\s*new\s\+' - let item = item.'->' - let m_menu = matchstr(field['text'], - \ '=\s*new\s\+\zs[a-zA-Z_0-9\x7f-\xff]\+\ze') - endif - let ext_vars[item] = m_menu - endfor - endif - endif - - call extend(int_vars, ext_vars) - let g:a0 = keys(int_vars) - - for m in sort(keys(int_vars)) - if m =~ '^\'.a:base - call add(res, m) - endif - endfor - - let int_list = res - - let int_dict = [] - for i in int_list - if int_vars[i] != '' - let class = ' ' - if int_vars[i] != '' - let class = i.' class ' - endif - let int_dict += [{'word':i, 'info':class.int_vars[i], 'kind':'v'}] - else - let int_dict += [{'word':i, 'kind':'v'}] - endif - endfor - - return int_dict - - else - " Complete everything else - - " + functions, DONE - " + keywords of language DONE - " + defines (constant definitions), DONE - " + extend keywords for predefined constants, DONE - " + classes (after new), DONE - " + limit choice after -> and :: to funcs and vars DONE - - " Internal solution for finding functions in current file. - let file = getline(1, '$') - call filter(file, - \ 'v:val =~ "function\\s\\+&\\?[a-zA-Z_\\x7f-\\xff][a-zA-Z_0-9\\x7f-\\xff]*\\s*("') - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - let jfile = join(file, ' ') - let int_values = split(jfile, 'function\s\+') - let int_functions = {} - for i in int_values - let f_name = matchstr(i, - \ '^&\?\zs[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\ze') - let f_args = matchstr(i, - \ '^&\?[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\s*(\s*\zs.\{-}\ze\s*)\_s*{') - let int_functions[f_name.'('] = f_args.')' - endfor - - " Prepare list of functions from tags file - let ext_functions = {} - if fnames != '' - exe 'silent! vimgrep /^'.a:base.'.*\tf\(\t\|$\)/j '.fnames - let qflist = getqflist() - if len(qflist) > 0 - for field in qflist - " File name - let item = matchstr(field['text'], '^[^[:space:]]\+') - let fname = matchstr(field['text'], '\t\zs\f\+\ze') - let prototype = matchstr(field['text'], - \ 'function\s\+&\?[^[:space:]]\+\s*(\s*\zs.\{-}\ze\s*)\s*{\?') - let ext_functions[item.'('] = prototype.') - '.fname - endfor - endif - endif - - " All functions - call extend(int_functions, ext_functions) - call extend(int_functions, g:php_builtin_functions) - - " Internal solution for finding constants in current file - let file = getline(1, '$') - call filter(file, 'v:val =~ "define\\s*("') - let jfile = join(file, ' ') - let int_values = split(jfile, 'define\s*(\s*') - let int_constants = {} - for i in int_values - let c_name = matchstr(i, '\(["'']\)\zs[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\ze\1') - " let c_value = matchstr(i, - " \ '\(["'']\)[a-zA-Z_\x7f-\xff][a-zA-Z_0-9\x7f-\xff]*\1\s*,\s*\zs.\{-}\ze\s*)') - if c_name != '' - let int_constants[c_name] = '' " c_value - endif - endfor - - " Prepare list of constants from tags file - let fnames = join(map(tagfiles(), 'escape(v:val, " \\#%")')) - let ext_constants = {} - if fnames != '' - exe 'silent! 
vimgrep /^'.a:base.'.*\td\(\t\|$\)/j '.fnames - let qflist = getqflist() - if len(qflist) > 0 - for field in qflist - let item = matchstr(field['text'], '^[^[:space:]]\+') - let ext_constants[item] = '' - endfor - endif - endif - - " All constants - call extend(int_constants, ext_constants) - " Treat keywords as constants - - let all_values = {} - - " One big dictionary of functions - call extend(all_values, int_functions) - - " Add constants - call extend(all_values, int_constants) - " Add keywords - call extend(all_values, g:php_keywords) - - for m in sort(keys(all_values)) - if m =~ '^'.a:base - call add(res, m) - endif - endfor - - let int_list = res - - let final_list = [] - for i in int_list - if has_key(int_functions, i) - let final_list += - \ [{'word':i, - \ 'info':i.int_functions[i], - \ 'kind':'f'}] - elseif has_key(int_constants, i) - let final_list += [{'word':i, 'kind':'d'}] - else - let final_list += [{'word':i}] - endif - endfor - - return final_list - - endif - -endfunction -" vim:set foldmethod=marker: diff --git a/tests/examplefiles/pkgconfig_example.pc b/tests/examplefiles/pkgconfig_example.pc deleted file mode 100644 index 2a59204e..00000000 --- a/tests/examplefiles/pkgconfig_example.pc +++ /dev/null @@ -1,18 +0,0 @@ -# This is for a fictional package `yet another portable hatchpotch generator'. -prefix=/usr/local/opt/site/private # define variable `prefix` -exec_prefix=${prefix} # using variable reference -libdir=${exec_prefix}/lib -includedir=${prefix}/include -just_for_test=$${this is not a part of variable reference} # escape with `$$` - -Name: YAPHatchPotchGen -Description: Yet Another Portable HatchPotch GENerator. -Version: 352.9.3 -URL: http://www9.yaphatchpotchgen.net # Don't access. -Requires: piyohogelib-9.0 = 9.5.3 -Requires.private: nyorolib-3.0 = 3.0.9 -Conflicts: apiyohoge <= 8.3 -Libs: -L${libdir} -lyaphatchpotchgen-352.9 # using variable reference -Libs.private: -ll -ly -Cflags: -I${includedir}/piyohogelib-9.0 -I${libdir}/yaphatchpotchgen/include - diff --git a/tests/examplefiles/plain.bst b/tests/examplefiles/plain.bst deleted file mode 100644 index 7adf4bb0..00000000 --- a/tests/examplefiles/plain.bst +++ /dev/null @@ -1,1097 +0,0 @@ -% BibTeX standard bibliography style `plain' - % Version 0.99b (8-Dec-10 release) for BibTeX versions 0.99a or later. - % Copyright (C) 1984, 1985, 1988, 2010 Howard Trickey and Oren Patashnik. - % Unlimited copying and redistribution of this file are permitted as long as - % it is unmodified. Modifications (and redistribution of modified versions) - % are also permitted, but only if the resulting file is renamed to something - % besides btxbst.doc, plain.bst, unsrt.bst, alpha.bst, and abbrv.bst. - % This restriction helps ensure that all standard styles are identical. - % The file btxbst.doc has the documentation for this style. 
- -ENTRY - { address - author - booktitle - chapter - edition - editor - howpublished - institution - journal - key - month - note - number - organization - pages - publisher - school - series - title - type - volume - year - } - {} - { label } - -INTEGERS { output.state before.all mid.sentence after.sentence after.block } - -FUNCTION {init.state.consts} -{ #0 'before.all := - #1 'mid.sentence := - #2 'after.sentence := - #3 'after.block := -} - -STRINGS { s t } - -FUNCTION {output.nonnull} -{ 's := - output.state mid.sentence = - { ", " * write$ } - { output.state after.block = - { add.period$ write$ - newline$ - "\newblock " write$ - } - { output.state before.all = - 'write$ - { add.period$ " " * write$ } - if$ - } - if$ - mid.sentence 'output.state := - } - if$ - s -} - -FUNCTION {output} -{ duplicate$ empty$ - 'pop$ - 'output.nonnull - if$ -} - -FUNCTION {output.check} -{ 't := - duplicate$ empty$ - { pop$ "empty " t * " in " * cite$ * warning$ } - 'output.nonnull - if$ -} - -FUNCTION {output.bibitem} -{ newline$ - "\bibitem{" write$ - cite$ write$ - "}" write$ - newline$ - "" - before.all 'output.state := -} - -FUNCTION {fin.entry} -{ add.period$ - write$ - newline$ -} - -FUNCTION {new.block} -{ output.state before.all = - 'skip$ - { after.block 'output.state := } - if$ -} - -FUNCTION {new.sentence} -{ output.state after.block = - 'skip$ - { output.state before.all = - 'skip$ - { after.sentence 'output.state := } - if$ - } - if$ -} - -FUNCTION {not} -{ { #0 } - { #1 } - if$ -} - -FUNCTION {and} -{ 'skip$ - { pop$ #0 } - if$ -} - -FUNCTION {or} -{ { pop$ #1 } - 'skip$ - if$ -} - -FUNCTION {new.block.checka} -{ empty$ - 'skip$ - 'new.block - if$ -} - -FUNCTION {new.block.checkb} -{ empty$ - swap$ empty$ - and - 'skip$ - 'new.block - if$ -} - -FUNCTION {new.sentence.checka} -{ empty$ - 'skip$ - 'new.sentence - if$ -} - -FUNCTION {new.sentence.checkb} -{ empty$ - swap$ empty$ - and - 'skip$ - 'new.sentence - if$ -} - -FUNCTION {field.or.null} -{ duplicate$ empty$ - { pop$ "" } - 'skip$ - if$ -} - -FUNCTION {emphasize} -{ duplicate$ empty$ - { pop$ "" } - { "{\em " swap$ * "}" * } - if$ -} - -INTEGERS { nameptr namesleft numnames } - -FUNCTION {format.names} -{ 's := - #1 'nameptr := - s num.names$ 'numnames := - numnames 'namesleft := - { namesleft #0 > } - { s nameptr "{ff~}{vv~}{ll}{, jj}" format.name$ 't := - nameptr #1 > - { namesleft #1 > - { ", " * t * } - { numnames #2 > - { "," * } - 'skip$ - if$ - t "others" = - { " et~al." 
* } - { " and " * t * } - if$ - } - if$ - } - 't - if$ - nameptr #1 + 'nameptr := - namesleft #1 - 'namesleft := - } - while$ -} - -FUNCTION {format.authors} -{ author empty$ - { "" } - { author format.names } - if$ -} - -FUNCTION {format.editors} -{ editor empty$ - { "" } - { editor format.names - editor num.names$ #1 > - { ", editors" * } - { ", editor" * } - if$ - } - if$ -} - -FUNCTION {format.title} -{ title empty$ - { "" } - { title "t" change.case$ } - if$ -} - -FUNCTION {n.dashify} -{ 't := - "" - { t empty$ not } - { t #1 #1 substring$ "-" = - { t #1 #2 substring$ "--" = not - { "--" * - t #2 global.max$ substring$ 't := - } - { { t #1 #1 substring$ "-" = } - { "-" * - t #2 global.max$ substring$ 't := - } - while$ - } - if$ - } - { t #1 #1 substring$ * - t #2 global.max$ substring$ 't := - } - if$ - } - while$ -} - -FUNCTION {format.date} -{ year empty$ - { month empty$ - { "" } - { "there's a month but no year in " cite$ * warning$ - month - } - if$ - } - { month empty$ - 'year - { month " " * year * } - if$ - } - if$ -} - -FUNCTION {format.btitle} -{ title emphasize -} - -FUNCTION {tie.or.space.connect} -{ duplicate$ text.length$ #3 < - { "~" } - { " " } - if$ - swap$ * * -} - -FUNCTION {either.or.check} -{ empty$ - 'pop$ - { "can't use both " swap$ * " fields in " * cite$ * warning$ } - if$ -} - -FUNCTION {format.bvolume} -{ volume empty$ - { "" } - { "volume" volume tie.or.space.connect - series empty$ - 'skip$ - { " of " * series emphasize * } - if$ - "volume and number" number either.or.check - } - if$ -} - -FUNCTION {format.number.series} -{ volume empty$ - { number empty$ - { series field.or.null } - { output.state mid.sentence = - { "number" } - { "Number" } - if$ - number tie.or.space.connect - series empty$ - { "there's a number but no series in " cite$ * warning$ } - { " in " * series * } - if$ - } - if$ - } - { "" } - if$ -} - -FUNCTION {format.edition} -{ edition empty$ - { "" } - { output.state mid.sentence = - { edition "l" change.case$ " edition" * } - { edition "t" change.case$ " edition" * } - if$ - } - if$ -} - -INTEGERS { multiresult } - -FUNCTION {multi.page.check} -{ 't := - #0 'multiresult := - { multiresult not - t empty$ not - and - } - { t #1 #1 substring$ - duplicate$ "-" = - swap$ duplicate$ "," = - swap$ "+" = - or or - { #1 'multiresult := } - { t #2 global.max$ substring$ 't := } - if$ - } - while$ - multiresult -} - -FUNCTION {format.pages} -{ pages empty$ - { "" } - { pages multi.page.check - { "pages" pages n.dashify tie.or.space.connect } - { "page" pages tie.or.space.connect } - if$ - } - if$ -} - -FUNCTION {format.vol.num.pages} -{ volume field.or.null - number empty$ - 'skip$ - { "(" number * ")" * * - volume empty$ - { "there's a number but no volume in " cite$ * warning$ } - 'skip$ - if$ - } - if$ - pages empty$ - 'skip$ - { duplicate$ empty$ - { pop$ format.pages } - { ":" * pages n.dashify * } - if$ - } - if$ -} - -FUNCTION {format.chapter.pages} -{ chapter empty$ - 'format.pages - { type empty$ - { "chapter" } - { type "l" change.case$ } - if$ - chapter tie.or.space.connect - pages empty$ - 'skip$ - { ", " * format.pages * } - if$ - } - if$ -} - -FUNCTION {format.in.ed.booktitle} -{ booktitle empty$ - { "" } - { editor empty$ - { "In " booktitle emphasize * } - { "In " format.editors * ", " * booktitle emphasize * } - if$ - } - if$ -} - -FUNCTION {empty.misc.check} -{ author empty$ title empty$ howpublished empty$ - month empty$ year empty$ note empty$ - and and and and and - key empty$ not and - { "all relevant fields are empty in " 
cite$ * warning$ } - 'skip$ - if$ -} - -FUNCTION {format.thesis.type} -{ type empty$ - 'skip$ - { pop$ - type "t" change.case$ - } - if$ -} - -FUNCTION {format.tr.number} -{ type empty$ - { "Technical Report" } - 'type - if$ - number empty$ - { "t" change.case$ } - { number tie.or.space.connect } - if$ -} - -FUNCTION {format.article.crossref} -{ key empty$ - { journal empty$ - { "need key or journal for " cite$ * " to crossref " * crossref * - warning$ - "" - } - { "In {\em " journal * "\/}" * } - if$ - } - { "In " key * } - if$ - " \cite{" * crossref * "}" * -} - -FUNCTION {format.crossref.editor} -{ editor #1 "{vv~}{ll}" format.name$ - editor num.names$ duplicate$ - #2 > - { pop$ " et~al." * } - { #2 < - 'skip$ - { editor #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" = - { " et~al." * } - { " and " * editor #2 "{vv~}{ll}" format.name$ * } - if$ - } - if$ - } - if$ -} - -FUNCTION {format.book.crossref} -{ volume empty$ - { "empty volume in " cite$ * "'s crossref of " * crossref * warning$ - "In " - } - { "Volume" volume tie.or.space.connect - " of " * - } - if$ - editor empty$ - editor field.or.null author field.or.null = - or - { key empty$ - { series empty$ - { "need editor, key, or series for " cite$ * " to crossref " * - crossref * warning$ - "" * - } - { "{\em " * series * "\/}" * } - if$ - } - { key * } - if$ - } - { format.crossref.editor * } - if$ - " \cite{" * crossref * "}" * -} - -FUNCTION {format.incoll.inproc.crossref} -{ editor empty$ - editor field.or.null author field.or.null = - or - { key empty$ - { booktitle empty$ - { "need editor, key, or booktitle for " cite$ * " to crossref " * - crossref * warning$ - "" - } - { "In {\em " booktitle * "\/}" * } - if$ - } - { "In " key * } - if$ - } - { "In " format.crossref.editor * } - if$ - " \cite{" * crossref * "}" * -} - -FUNCTION {article} -{ output.bibitem - format.authors "author" output.check - new.block - format.title "title" output.check - new.block - crossref missing$ - { journal emphasize "journal" output.check - format.vol.num.pages output - format.date "year" output.check - } - { format.article.crossref output.nonnull - format.pages output - } - if$ - new.block - note output - fin.entry -} - -FUNCTION {book} -{ output.bibitem - author empty$ - { format.editors "author and editor" output.check } - { format.authors output.nonnull - crossref missing$ - { "author and editor" editor either.or.check } - 'skip$ - if$ - } - if$ - new.block - format.btitle "title" output.check - crossref missing$ - { format.bvolume output - new.block - format.number.series output - new.sentence - publisher "publisher" output.check - address output - } - { new.block - format.book.crossref output.nonnull - } - if$ - format.edition output - format.date "year" output.check - new.block - note output - fin.entry -} - -FUNCTION {booklet} -{ output.bibitem - format.authors output - new.block - format.title "title" output.check - howpublished address new.block.checkb - howpublished output - address output - format.date output - new.block - note output - fin.entry -} - -FUNCTION {inbook} -{ output.bibitem - author empty$ - { format.editors "author and editor" output.check } - { format.authors output.nonnull - crossref missing$ - { "author and editor" editor either.or.check } - 'skip$ - if$ - } - if$ - new.block - format.btitle "title" output.check - crossref missing$ - { format.bvolume output - format.chapter.pages "chapter and pages" output.check - new.block - format.number.series output - new.sentence - publisher "publisher" output.check - address 
output - } - { format.chapter.pages "chapter and pages" output.check - new.block - format.book.crossref output.nonnull - } - if$ - format.edition output - format.date "year" output.check - new.block - note output - fin.entry -} - -FUNCTION {incollection} -{ output.bibitem - format.authors "author" output.check - new.block - format.title "title" output.check - new.block - crossref missing$ - { format.in.ed.booktitle "booktitle" output.check - format.bvolume output - format.number.series output - format.chapter.pages output - new.sentence - publisher "publisher" output.check - address output - format.edition output - format.date "year" output.check - } - { format.incoll.inproc.crossref output.nonnull - format.chapter.pages output - } - if$ - new.block - note output - fin.entry -} - -FUNCTION {inproceedings} -{ output.bibitem - format.authors "author" output.check - new.block - format.title "title" output.check - new.block - crossref missing$ - { format.in.ed.booktitle "booktitle" output.check - format.bvolume output - format.number.series output - format.pages output - address empty$ - { organization publisher new.sentence.checkb - organization output - publisher output - format.date "year" output.check - } - { address output.nonnull - format.date "year" output.check - new.sentence - organization output - publisher output - } - if$ - } - { format.incoll.inproc.crossref output.nonnull - format.pages output - } - if$ - new.block - note output - fin.entry -} - -FUNCTION {conference} { inproceedings } - -FUNCTION {manual} -{ output.bibitem - author empty$ - { organization empty$ - 'skip$ - { organization output.nonnull - address output - } - if$ - } - { format.authors output.nonnull } - if$ - new.block - format.btitle "title" output.check - author empty$ - { organization empty$ - { address new.block.checka - address output - } - 'skip$ - if$ - } - { organization address new.block.checkb - organization output - address output - } - if$ - format.edition output - format.date output - new.block - note output - fin.entry -} - -FUNCTION {mastersthesis} -{ output.bibitem - format.authors "author" output.check - new.block - format.title "title" output.check - new.block - "Master's thesis" format.thesis.type output.nonnull - school "school" output.check - address output - format.date "year" output.check - new.block - note output - fin.entry -} - -FUNCTION {misc} -{ output.bibitem - format.authors output - title howpublished new.block.checkb - format.title output - howpublished new.block.checka - howpublished output - format.date output - new.block - note output - fin.entry - empty.misc.check -} - -FUNCTION {phdthesis} -{ output.bibitem - format.authors "author" output.check - new.block - format.btitle "title" output.check - new.block - "PhD thesis" format.thesis.type output.nonnull - school "school" output.check - address output - format.date "year" output.check - new.block - note output - fin.entry -} - -FUNCTION {proceedings} -{ output.bibitem - editor empty$ - { organization output } - { format.editors output.nonnull } - if$ - new.block - format.btitle "title" output.check - format.bvolume output - format.number.series output - address empty$ - { editor empty$ - { publisher new.sentence.checka } - { organization publisher new.sentence.checkb - organization output - } - if$ - publisher output - format.date "year" output.check - } - { address output.nonnull - format.date "year" output.check - new.sentence - editor empty$ - 'skip$ - { organization output } - if$ - publisher output - } - if$ - new.block - 
note output - fin.entry -} - -FUNCTION {techreport} -{ output.bibitem - format.authors "author" output.check - new.block - format.title "title" output.check - new.block - format.tr.number output.nonnull - institution "institution" output.check - address output - format.date "year" output.check - new.block - note output - fin.entry -} - -FUNCTION {unpublished} -{ output.bibitem - format.authors "author" output.check - new.block - format.title "title" output.check - new.block - note "note" output.check - format.date output - fin.entry -} - -FUNCTION {default.type} { misc } - -MACRO {jan} {"January"} - -MACRO {feb} {"February"} - -MACRO {mar} {"March"} - -MACRO {apr} {"April"} - -MACRO {may} {"May"} - -MACRO {jun} {"June"} - -MACRO {jul} {"July"} - -MACRO {aug} {"August"} - -MACRO {sep} {"September"} - -MACRO {oct} {"October"} - -MACRO {nov} {"November"} - -MACRO {dec} {"December"} - -MACRO {acmcs} {"ACM Computing Surveys"} - -MACRO {acta} {"Acta Informatica"} - -MACRO {cacm} {"Communications of the ACM"} - -MACRO {ibmjrd} {"IBM Journal of Research and Development"} - -MACRO {ibmsj} {"IBM Systems Journal"} - -MACRO {ieeese} {"IEEE Transactions on Software Engineering"} - -MACRO {ieeetc} {"IEEE Transactions on Computers"} - -MACRO {ieeetcad} - {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"} - -MACRO {ipl} {"Information Processing Letters"} - -MACRO {jacm} {"Journal of the ACM"} - -MACRO {jcss} {"Journal of Computer and System Sciences"} - -MACRO {scp} {"Science of Computer Programming"} - -MACRO {sicomp} {"SIAM Journal on Computing"} - -MACRO {tocs} {"ACM Transactions on Computer Systems"} - -MACRO {tods} {"ACM Transactions on Database Systems"} - -MACRO {tog} {"ACM Transactions on Graphics"} - -MACRO {toms} {"ACM Transactions on Mathematical Software"} - -MACRO {toois} {"ACM Transactions on Office Information Systems"} - -MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"} - -MACRO {tcs} {"Theoretical Computer Science"} - -READ - -FUNCTION {sortify} -{ purify$ - "l" change.case$ -} - -INTEGERS { len } - -FUNCTION {chop.word} -{ 's := - 'len := - s #1 len substring$ = - { s len #1 + global.max$ substring$ } - 's - if$ -} - -FUNCTION {sort.format.names} -{ 's := - #1 'nameptr := - "" - s num.names$ 'numnames := - numnames 'namesleft := - { namesleft #0 > } - { nameptr #1 > - { " " * } - 'skip$ - if$ - s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't := - nameptr numnames = t "others" = and - { "et al" * } - { t sortify * } - if$ - nameptr #1 + 'nameptr := - namesleft #1 - 'namesleft := - } - while$ -} - -FUNCTION {sort.format.title} -{ 't := - "A " #2 - "An " #3 - "The " #4 t chop.word - chop.word - chop.word - sortify - #1 global.max$ substring$ -} - -FUNCTION {author.sort} -{ author empty$ - { key empty$ - { "to sort, need author or key in " cite$ * warning$ - "" - } - { key sortify } - if$ - } - { author sort.format.names } - if$ -} - -FUNCTION {author.editor.sort} -{ author empty$ - { editor empty$ - { key empty$ - { "to sort, need author, editor, or key in " cite$ * warning$ - "" - } - { key sortify } - if$ - } - { editor sort.format.names } - if$ - } - { author sort.format.names } - if$ -} - -FUNCTION {author.organization.sort} -{ author empty$ - { organization empty$ - { key empty$ - { "to sort, need author, organization, or key in " cite$ * warning$ - "" - } - { key sortify } - if$ - } - { "The " #4 organization chop.word sortify } - if$ - } - { author sort.format.names } - if$ -} - -FUNCTION {editor.organization.sort} -{ editor 
empty$ - { organization empty$ - { key empty$ - { "to sort, need editor, organization, or key in " cite$ * warning$ - "" - } - { key sortify } - if$ - } - { "The " #4 organization chop.word sortify } - if$ - } - { editor sort.format.names } - if$ -} - -FUNCTION {presort} -{ type$ "book" = - type$ "inbook" = - or - 'author.editor.sort - { type$ "proceedings" = - 'editor.organization.sort - { type$ "manual" = - 'author.organization.sort - 'author.sort - if$ - } - if$ - } - if$ - " " - * - year field.or.null sortify - * - " " - * - title field.or.null - sort.format.title - * - #1 entry.max$ substring$ - 'sort.key$ := -} - -ITERATE {presort} - -SORT - -STRINGS { longest.label } - -INTEGERS { number.label longest.label.width } - -FUNCTION {initialize.longest.label} -{ "" 'longest.label := - #1 'number.label := - #0 'longest.label.width := -} - -FUNCTION {longest.label.pass} -{ number.label int.to.str$ 'label := - number.label #1 + 'number.label := - label width$ longest.label.width > - { label 'longest.label := - label width$ 'longest.label.width := - } - 'skip$ - if$ -} - -EXECUTE {initialize.longest.label} - -ITERATE {longest.label.pass} - -FUNCTION {begin.bib} -{ preamble$ empty$ - 'skip$ - { preamble$ write$ newline$ } - if$ - "\begin{thebibliography}{" longest.label * "}" * write$ newline$ -} - -EXECUTE {begin.bib} - -EXECUTE {init.state.consts} - -ITERATE {call.type$} - -FUNCTION {end.bib} -{ newline$ - "\end{thebibliography}" write$ newline$ -} - -EXECUTE {end.bib} diff --git a/tests/examplefiles/pleac.in.rb b/tests/examplefiles/pleac.in.rb deleted file mode 100644 index d1dea9f4..00000000 --- a/tests/examplefiles/pleac.in.rb +++ /dev/null @@ -1,1223 +0,0 @@ -# -*- ruby -*- - -# Local variables: -# indent-tabs-mode: nil -# ruby-indent-level: 4 -# End: - -# @@PLEAC@@_NAME -# @@SKIP@@ Ruby - -# @@PLEAC@@_WEB -# @@SKIP@@ http://www.ruby-lang.org - - -# @@PLEAC@@_1.0 -string = '\n' # two characters, \ and an n -string = 'Jon \'Maddog\' Orwant' # literal single quotes - -string = "\n" # a "newline" character -string = "Jon \"Maddog\" Orwant" # literal double quotes - -string = %q/Jon 'Maddog' Orwant/ # literal single quotes - -string = %q[Jon 'Maddog' Orwant] # literal single quotes -string = %q{Jon 'Maddog' Orwant} # literal single quotes -string = %q(Jon 'Maddog' Orwant) # literal single quotes -string = %q # literal single quotes - -a = <<"EOF" -This is a multiline here document -terminated by EOF on a line by itself -EOF - - -# @@PLEAC@@_1.1 -value = string[offset,count] -value = string[offset..-1] - -string[offset,count] = newstring -string[offset..-1] = newtail - -# in Ruby we can also specify intervals by their two offsets -value = string[offset..offs2] -string[offset..offs2] = newstring - -leading, s1, s2, trailing = data.unpack("A5 x3 A8 A8 A*") - -fivers = string.unpack("A5" * (string.length/5)) - -chars = string.unpack("A1" * string.length) - -string = "This is what you have" -# +012345678901234567890 Indexing forwards (left to right) -# 109876543210987654321- Indexing backwards (right to left) -# note that 0 means 10 or 20, etc. 
above - -first = string[0, 1] # "T" -start = string[5, 2] # "is" -rest = string[13..-1] # "you have" -last = string[-1, 1] # "e" -end_ = string[-4..-1] # "have" -piece = string[-8, 3] # "you" - -string[5, 2] = "wasn't" # change "is" to "wasn't" -string[-12..-1] = "ondrous" # "This wasn't wondrous" -string[0, 1] = "" # delete first character -string[-10..-1] = "" # delete last 10 characters - -if string[-10..-1] =~ /pattern/ - puts "Pattern matches in last 10 characters" -end - -string[0, 5].gsub!(/is/, 'at') - -a = "make a hat" -a[0, 1], a[-1, 1] = a[-1, 1], a[0, 1] - -a = "To be or not to be" -b = a.unpack("x6 A6") - -b, c = a.unpack("x6 A2 X5 A2") -puts "#{b}\n#{c}\n" - -def cut2fmt(*args) - template = '' - lastpos = 1 - for place in args - template += "A" + (place - lastpos).to_s + " " - lastpos = place - end - template += "A*" - return template -end - -fmt = cut2fmt(8, 14, 20, 26, 30) - - -# @@PLEAC@@_1.2 -# careful! "b is true" doesn't mean "b != 0" (0 is true in Ruby) -# thus no problem of "defined" later since only nil is false -# the following sets to `c' if `b' is nil or false -a = b || c - -# if you need Perl's behaviour (setting to `c' if `b' is 0) the most -# effective way is to use Numeric#nonzero? (thanks to Dave Thomas!) -a = b.nonzero? || c - -# you will still want to use defined? in order to test -# for scope existence of a given object -a = defined?(b) ? b : c - -dir = ARGV.shift || "/tmp" - - -# @@PLEAC@@_1.3 -v1, v2 = v2, v1 - -alpha, beta, production = %w(January March August) -alpha, beta, production = beta, production, alpha - - -# @@PLEAC@@_1.4 -num = char[0] -char = num.chr - -# Ruby also supports having a char from character constant -num = ?r - -char = sprintf("%c", num) -printf("Number %d is character %c\n", num, num) - -ascii = string.unpack("C*") -string = ascii.pack("C*") - -hal = "HAL" -ascii = hal.unpack("C*") -# We can't use Array#each since we can't mutate a Fixnum -ascii.collect! { |i| - i + 1 # add one to each ASCII value -} -ibm = ascii.pack("C*") -puts ibm - - -# @@PLEAC@@_1.5 -array = string.split('') - -array = string.unpack("C*") - -string.scan(/./) { |b| - # do something with b -} - -string = "an apple a day" -print "unique chars are: ", string.split('').uniq.sort, "\n" - -sum = 0 -for ascval in string.unpack("C*") # or use Array#each for a pure OO style :) - sum += ascval -end -puts "sum is #{sum & 0xffffffff}" # since Ruby will go Bignum if necessary - -# @@INCLUDE@@ include/ruby/slowcat.rb - - -# @@PLEAC@@_1.6 -revbytes = string.reverse - -revwords = string.split(" ").reverse.join(" ") - -revwords = string.split(/(\s+)/).reverse.join - -# using the fact that IO is Enumerable, you can directly "select" it -long_palindromes = File.open("/usr/share/dict/words"). - select { |w| w.chomp!; w.reverse == w && w.length > 5 } - - -# @@PLEAC@@_1.7 -while string.sub!("\t+") { ' ' * ($&.length * 8 - $`.length % 8) } -end - - -# @@PLEAC@@_1.8 -'You owe #{debt} to me'.gsub(/\#{(\w+)}/) { eval($1) } - -rows, cols = 24, 80 -text = %q(I am #{rows} high and #{cols} long) -text.gsub!(/\#{(\w+)}/) { eval("#{$1}") } -puts text - -'I am 17 years old'.gsub(/\d+/) { 2 * $&.to_i } - - -# @@PLEAC@@_1.9 -e = "bo peep".upcase -e.downcase! -e.capitalize! - -"thIS is a loNG liNE".gsub!(/\w+/) { $&.capitalize } - - -# @@PLEAC@@_1.10 -"I have #{n+1} guanacos." -print "I have ", n+1, " guanacos." 
- - -# @@PLEAC@@_1.11 -var = <<'EOF'.gsub(/^\s+/, '') - your text - goes here -EOF - - -# @@PLEAC@@_1.12 -string = "Folding and splicing is the work of an editor,\n"+ - "not a mere collection of silicon\n"+ - "and\n"+ - "mobile electrons!" - -def wrap(str, max_size) - all = [] - line = '' - for l in str.split - if (line+l).length >= max_size - all.push(line) - line = '' - end - line += line == '' ? l : ' ' + l - end - all.push(line).join("\n") -end - -print wrap(string, 20) -#=> Folding and -#=> splicing is the -#=> work of an editor, -#=> not a mere -#=> collection of -#=> silicon and mobile -#=> electrons! - - -# @@PLEAC@@_1.13 -string = %q(Mom said, "Don't do that.") -string.gsub(/['"]/) { '\\'+$& } -string.gsub(/['"]/, '\&\&') -string.gsub(/[^A-Z]/) { '\\'+$& } -"is a test!".gsub(/\W/) { '\\'+$& } # no function like quotemeta? - - -# @@PLEAC@@_1.14 -string.strip! - - -# @@PLEAC@@_1.15 -def parse_csv(text) - new = text.scan(/"([^\"\\]*(?:\\.[^\"\\]*)*)",?|([^,]+),?|,/) - new << nil if text[-1] == ?, - new.flatten.compact -end - -line = %q -fields = parse_csv(line) -fields.each_with_index { |v,i| - print "#{i} : #{v}\n"; -} - - -# @@PLEAC@@_1.16 -# Use the soundex.rb Library from Michael Neumann. -# http://www.s-direktnet.de/homepages/neumann/rb_prgs/Soundex.rb -require 'Soundex' - -code = Text::Soundex.soundex(string) -codes = Text::Soundex.soundex(array) - -# substitution function for getpwent(): -# returns an array of user entries, -# each entry contains the username and the full name -def login_names - result = [] - File.open("/etc/passwd") { |file| - file.each_line { |line| - next if line.match(/^#/) - cols = line.split(":") - result.push([cols[0], cols[4]]) - } - } - result -end - -puts "Lookup user: " -user = STDIN.gets -user.chomp! -exit unless user -name_code = Text::Soundex.soundex(user) - -splitter = Regexp.new('(\w+)[^,]*\b(\w+)') -for username, fullname in login_names do - firstname, lastname = splitter.match(fullname)[1,2] - if name_code == Text::Soundex.soundex(username) - || name_code == Text::Soundex.soundex(firstname) - || name_code == Text::Soundex.soundex(lastname) - then - puts "#{username}: #{firstname} #{lastname}" - end -end - - -# @@PLEAC@@_1.17 -# @@INCLUDE@@ include/ruby/fixstyle.rb - - -# @@PLEAC@@_1.18 -# @@INCLUDE@@ include/ruby/psgrep.rb - - -# @@PLEAC@@_2.1 -# Matz tells that you can use Integer() for strict checked conversion. 
-Integer("abc") -#=> `Integer': invalid value for Integer: "abc" (ArgumentError) -Integer("567") -#=> 567 - -# You may use Float() for floating point stuff -Integer("56.7") -#=> `Integer': invalid value for Integer: "56.7" (ArgumentError) -Float("56.7") -#=> 56.7 - -# You may also use a regexp for that -if string =~ /^[+-]?\d+$/ - p 'is an integer' -else - p 'is not' -end - -if string =~ /^-?(?:\d+(?:\.\d*)?|\.\d+)$/ - p 'is a decimal number' -else - p 'is not' -end - - -# @@PLEAC@@_2.2 -# equal(num1, num2, accuracy) : returns true if num1 and num2 are -# equal to accuracy number of decimal places -def equal(i, j, a) - sprintf("%.#{a}g", i) == sprintf("%.#{a}g", j) -end - -wage = 536 # $5.36/hour -week = 40 * wage # $214.40 -printf("One week's wage is: \$%.2f\n", week/100.0) - - -# @@PLEAC@@_2.3 -num.round # rounds to integer - -a = 0.255 -b = sprintf("%.2f", a) -print "Unrounded: #{a}\nRounded: #{b}\n" -printf "Unrounded: #{a}\nRounded: %.2f\n", a - -print "number\tint\tfloor\tceil\n" -a = [ 3.3 , 3.5 , 3.7, -3.3 ] -for n in a - printf("% .1f\t% .1f\t% .1f\t% .1f\n", # at least I don't fake my output :) - n, n.to_i, n.floor, n.ceil) -end - - -# @@PLEAC@@_2.4 -def dec2bin(n) - [n].pack("N").unpack("B32")[0].sub(/^0+(?=\d)/, '') -end - -def bin2dec(n) - [("0"*32+n.to_s)[-32..-1]].pack("B32").unpack("N")[0] -end - - -# @@PLEAC@@_2.5 -for i in x .. y - # i is set to every integer from x to y, inclusive -end - -x.step(y,7) { |i| - # i is set to every integer from x to y, stepsize = 7 -} - -print "Infancy is: " -(0..2).each { |i| - print i, " " -} -print "\n" - - -# @@PLEAC@@_2.6 -# We can add conversion methods to the Integer class, -# this makes a roman number just a representation for normal numbers. -class Integer - - @@romanlist = [["M", 1000], - ["CM", 900], - ["D", 500], - ["CD", 400], - ["C", 100], - ["XC", 90], - ["L", 50], - ["XL", 40], - ["X", 10], - ["IX", 9], - ["V", 5], - ["IV", 4], - ["I", 1]] - - def to_roman - remains = self - roman = "" - for sym, num in @@romanlist - while remains >= num - remains -= num - roman << sym - end - end - roman - end - - def Integer.from_roman(roman) - ustr = roman.upcase - sum = 0 - for entry in @@romanlist - sym, num = entry[0], entry[1] - while sym == ustr[0, sym.length] - sum += num - ustr.slice!(0, sym.length) - end - end - sum - end - -end - - -roman_fifteen = 15.to_roman -puts "Roman for fifteen is #{roman_fifteen}" -i = Integer.from_roman(roman_fifteen) -puts "Converted back, #{roman_fifteen} is #{i}" - -# check -for i in (1..3900) - r = i.to_roman - j = Integer.from_roman(r) - if i != j - puts "error: #{i} : #{r} - #{j}" - end -end - - -# @@PLEAC@@_2.7 -random = rand(y-x+1)+x - -chars = ["A".."Z","a".."z","0".."9"].collect { |r| r.to_a }.join + %q(!@$%^&*) -password = (1..8).collect { chars[rand(chars.size)] }.pack("C*") - - -# @@PLEAC@@_2.8 -srand # uses a combination of the time, the process id, and a sequence number -srand(val) # for repeatable behaviour - - -# @@PLEAC@@_2.9 -# from the randomr lib: -# http://raa.ruby-lang.org/project/randomr/ -----> http://raa.ruby-lang.org/project/randomr/ - -require 'random/mersenne_twister' -mers = Random::MersenneTwister.new 123456789 -puts mers.rand(0) # 0.550321932544541 -puts mers.rand(10) # 2 - -# using online sources of random data via the realrand package: -# http://raa.ruby-lang.org/project/realrand/ -# **Note** -# The following online services are used in this package: -# http://www.random.org - source: atmospheric noise -# http://www.fourmilab.ch/hotbits - source: radioactive decay timings 
-# http://random.hd.org - source: entropy from local and network noise -# Please visit the sites and respect the rules of each service. - -require 'random/online' - -generator1 = Random::RandomOrg.new -puts generator1.randbyte(5).join(",") -puts generator1.randnum(10, 1, 6).join(",") # Roll dice 10 times. - -generator2 = Random::FourmiLab.new -puts generator2.randbyte(5).join(",") -# randnum is not supported. - -generator3 = Random::EntropyPool.new -puts generator3.randbyte(5).join(",") -# randnum is not supported. - - -# @@PLEAC@@_2.10 -def gaussian_rand - begin - u1 = 2 * rand() - 1 - u2 = 2 * rand() - 1 - w = u1*u1 + u2*u2 - end while (w >= 1) - w = Math.sqrt((-2*Math.log(w))/w) - [ u2*w, u1*w ] -end - -mean = 25 -sdev = 2 -salary = gaussian_rand[0] * sdev + mean -printf("You have been hired at \$%.2f\n", salary) - - -# @@PLEAC@@_2.11 -def deg2rad(d) - (d/180.0)*Math::PI -end - -def rad2deg(r) - (r/Math::PI)*180 -end - - -# @@PLEAC@@_2.12 -sin_val = Math.sin(angle) -cos_val = Math.cos(angle) -tan_val = Math.tan(angle) - -# AFAIK Ruby's Math module doesn't provide acos/asin -# While we're at it, let's also define missing hyperbolic functions -module Math - def Math.asin(x) - atan2(x, sqrt(1 - x**2)) - end - def Math.acos(x) - atan2(sqrt(1 - x**2), x) - end - def Math.atan(x) - atan2(x, 1) - end - def Math.sinh(x) - (exp(x) - exp(-x)) / 2 - end - def Math.cosh(x) - (exp(x) + exp(-x)) / 2 - end - def Math.tanh(x) - sinh(x) / cosh(x) - end -end - -# The support for Complex numbers is not built-in -y = Math.acos(3.7) -#=> in `sqrt': square root for negative number (ArgumentError) - -# There is an implementation of Complex numbers in 'complex.rb' in current -# Ruby distro, but it doesn't support atan2 with complex args, so it doesn't -# solve this problem. - - -# @@PLEAC@@_2.13 -log_e = Math.log(val) -log_10 = Math.log10(val) - -def log_base(base, val) - Math.log(val)/Math.log(base) -end - -answer = log_base(10, 10_000) -puts "log10(10,000) = #{answer}" - - -# @@PLEAC@@_2.14 -require 'matrix.rb' - -a = Matrix[[3, 2, 3], [5, 9, 8]] -b = Matrix[[4, 7], [9, 3], [8, 1]] -c = a * b - -a.row_size -a.column_size - -c.det -a.transpose - - -# @@PLEAC@@_2.15 -require 'complex.rb' -require 'rational.rb' - -a = Complex(3, 5) # 3 + 5i -b = Complex(2, -2) # 2 - 2i -puts "c = #{a*b}" - -c = a * b -d = 3 + 4*Complex::I - -printf "sqrt(#{d}) = %s\n", Math.sqrt(d) - - -# @@PLEAC@@_2.16 -number = hexadecimal.hex -number = octal.oct - -print "Gimme a number in decimal, octal, or hex: " -num = gets.chomp -exit unless defined?(num) -num = num.oct if num =~ /^0/ # does both oct and hex -printf "%d %x %o\n", num, num, num - -print "Enter file permission in octal: " -permissions = gets.chomp -raise "Exiting ...\n" unless defined?(permissions) -puts "The decimal value is #{permissions.oct}" - - -# @@PLEAC@@_2.17 -def commify(n) - n.to_s =~ /([^\.]*)(\..*)?/ - int, dec = $1.reverse, $2 ? $2 : "" - while int.gsub!(/(,|\.|^)(\d{3})(\d)/, '\1\2,\3') - end - int.reverse + dec -end - - -# @@PLEAC@@_2.18 -printf "It took %d hour%s\n", time, time == 1 ? "" : "s" - -# dunno if an equivalent to Lingua::EN::Inflect exists... 
- - -# @@PLEAC@@_2.19 -#----------------------------- -#!/usr/bin/ruby -# bigfact - calculating prime factors -def factorize(orig) - factors = {} - factors.default = 0 # return 0 instead nil if key not found in hash - n = orig - i = 2 - sqi = 4 # square of i - while sqi <= n do - while n.modulo(i) == 0 do - n /= i - factors[i] += 1 - # puts "Found factor #{i}" - end - # we take advantage of the fact that (i +1)**2 = i**2 + 2*i +1 - sqi += 2 * i + 1 - i += 1 - end - - if (n != 1) && (n != orig) - factors[n] += 1 - end - factors -end - -def printfactorhash(orig, factorcount) - print format("%-10d ", orig) - if factorcount.length == 0 - print "PRIME" - else - # sorts after number, because the hash keys are numbers - factorcount.sort.each { |factor,exponent| - print factor - if exponent > 1 - print "**", exponent - end - print " " - } - end - puts -end - -for arg in ARGV - n = arg.to_i - mfactors = factorize(n) - printfactorhash(n, mfactors) -end -#----------------------------- - - -# @@PLEAC@@_3.0 -puts Time.now - -print "Today is day ", Time.now.yday, " of the current year.\n" -print "Today is day ", Time.now.day, " of the current month.\n" - - -# @@PLEAC@@_3.1 -day, month, year = Time.now.day, Time.now.month, Time.now.year -# or -day, month, year = Time.now.to_a[3..5] - -tl = Time.now.localtime -printf("The current date is %04d %02d %02d\n", tl.year, tl.month, tl.day) - -Time.now.localtime.strftime("%Y-%m-%d") - - -# @@PLEAC@@_3.2 -Time.local(year, month, day, hour, minute, second).tv_sec -Time.gm(year, month, day, hour, minute, second).tv_sec - - -# @@PLEAC@@_3.3 -sec, min, hour, day, month, year, wday, yday, isdst, zone = Time.at(epoch_secs).to_a - - -# @@PLEAC@@_3.4 -when_ = now + difference # now -> Time ; difference -> Numeric (delta in seconds) -then_ = now - difference - - -# @@PLEAC@@_3.5 -bree = 361535725 -nat = 96201950 - -difference = bree - nat -puts "There were #{difference} seconds between Nat and Bree" - -seconds = difference % 60 -difference = (difference - seconds) / 60 -minutes = difference % 60 -difference = (difference - minutes) / 60 -hours = difference % 24 -difference = (difference - hours) / 24 -days = difference % 7 -weeks = (difference - days) / 7 - -puts "(#{weeks} weeks, #{days} days, #{hours}:#{minutes}:#{seconds})" - - -# @@PLEAC@@_3.6 -monthday, weekday, yearday = date.mday, date.wday, date.yday - -# AFAIK the week number is not just a division since week boundaries are on sundays -weeknum = d.strftime("%U").to_i + 1 - -year = 1981 -month = "jun" # or `6' if you want to emulate a broken language -day = 16 -t = Time.mktime(year, month, day) -print "#{month}/#{day}/#{year} was a ", t.strftime("%A"), "\n" - - -# @@PLEAC@@_3.7 -yyyy, mm, dd = $1, $2, $3 if "1998-06-25" =~ /(\d+)-(\d+)-(\d+)/ - -epoch_seconds = Time.mktime(yyyy, mm, dd).tv_sec - -# dunno an equivalent to Date::Manip#ParseDate - - -# @@PLEAC@@_3.8 -string = Time.at(epoch_secs) -Time.at(1234567890).gmtime # gives: Fri Feb 13 23:31:30 UTC 2009 - -time = Time.mktime(1973, "jan", 18, 3, 45, 50) -print "In localtime it gives: ", time.localtime, "\n" - - -# @@PLEAC@@_3.9 -# Ruby provides micro-seconds in Time object -Time.now.usec - -# Ruby gives the seconds in floating format when substracting two Time objects -before = Time.now -line = gets -elapsed = Time.now - before -puts "You took #{elapsed} seconds." 
- -# On my Celeron-400 with Linux-2.2.19-14mdk, average for three execs are: -# This Ruby version: average 0.00321 sec -# Cookbook's Perl version: average 0.00981 sec -size = 500 -number_of_times = 100 -total_time = 0 -number_of_times.times { - # populate array - array = [] - size.times { array << rand } - # sort it - begin_ = Time.now - array.sort! - time = Time.now - begin_ - total_time += time -} -printf "On average, sorting %d random numbers takes %.5f seconds\n", - size, (total_time/Float(number_of_times)) - - -# @@PLEAC@@_3.10 -sleep(0.005) # Ruby is definitely not as broken as Perl :) -# (may be interrupted by sending the process a SIGALRM) - - -# @@PLEAC@@_3.11 -#!/usr/bin/ruby -w -# hopdelta - feed mail header, produce lines -# showing delay at each hop. -require 'time' -class MailHopDelta - - def initialize(mail) - @head = mail.gsub(/\n\s+/,' ') - @topline = %w-Sender Recipient Time Delta- - @start_from = mail.match(/^From.*\@([^\s>]*)/)[1] - @date = Time.parse(mail.match(/^Date:\s+(.*)/)[1]) - end - - def out(line) - "%-20.20s %-20.20s %-20.20s %s" % line - end - - def hop_date(day) - day.strftime("%I:%M:%S %Y/%m/%d") - end - - def puts_hops - puts out(@topline) - puts out(['Start', @start_from, hop_date(@date),'']) - @head.split(/\n/).reverse.grep(/^Received:/).each do |hop| - hop.gsub!(/\bon (.*?) (id.*)/,'; \1') - whence = hop.match(/;\s+(.*)$/)[1] - unless whence - warn "Bad received line: #{hop}" - next - end - from = $+ if hop =~ /from\s+(\S+)|\((.*?)\)/ - by = $1 if hop =~ /by\s+(\S+\.\S+)/ - next unless now = Time.parse(whence).localtime - delta = now - @date - puts out([from, by, hop_date(now), hop_time(delta)]) - @date = now - end - end - - def hop_time(secs) - sign = secs < 0 ? -1 : 1 - days, secs = secs.abs.divmod(60 * 60 * 24) - hours,secs = secs.abs.divmod(60 * 60) - mins, secs = secs.abs.divmod(60) - rtn = "%3ds" % [secs * sign] - rtn << "%3dm" % [mins * sign] if mins != 0 - rtn << "%3dh" % [hours * sign] if hours != 0 - rtn << "%3dd" % [days * sign] if days != 0 - rtn - end -end - -$/ = "" -mail = MailHopDelta.new(ARGF.gets).puts_hops - - -# @@PLEAC@@_4.0 -single_level = [ "this", "that", "the", "other" ] - -# Ruby directly supports nested arrays -double_level = [ "this", "that", [ "the", "other" ] ] -still_single_level = [ "this", "that", [ "the", "other" ] ].flatten - - -# @@PLEAC@@_4.1 -a = [ "quick", "brown", "fox" ] -a = %w(Why are you teasing me?) - -lines = <<"END_OF_HERE_DOC".gsub(/^\s*(.+)/, '\1') - The boy stood on the burning deck, - It was as hot as glass. -END_OF_HERE_DOC - -bigarray = IO.readlines("mydatafile").collect { |l| l.chomp } - -name = "Gandalf" -banner = %Q(Speak, #{name}, and welcome!) - -host_info = `host #{his_host}` - -%x(ps #{$$}) - -banner = 'Costs only $4.95'.split(' ') - -rax = %w! ( ) < > { } [ ] ! - - -# @@PLEAC@@_4.2 -def commify_series(arr) - return '' if not arr - case arr.size - when 0 then '' - when 1 then arr[0] - when 2 then arr.join(' and ') - else arr[0..-2].join(', ') + ', and ' + arr[-1] - end -end - -array = [ "red", "yellow", "green" ] - -print "I have ", array, " marbles\n" -# -> I have redyellowgreen marbles - -# But unlike Perl: -print "I have #{array} marbles\n" -# -> I have redyellowgreen marbles -# So, needs: -print "I have #{array.join(' ')} marbles\n" -# -> I have red yellow green marbles - -#!/usr/bin/ruby -# communify_series - show proper comma insertion in list output - -def commify_series(arr) - return '' if not arr - sepchar = arr.find { |p| p =~ /,/ } ? 
'; ' : ', ' - case arr.size - when 0 then '' - when 1 then arr[0] - when 2 then arr.join(' and ') - else arr[0..-2].join(sepchar) + sepchar + 'and ' + arr[-1] - end -end - -lists = [ - [ 'just one thing' ], - %w(Mutt Jeff), - %w(Peter Paul Mary), - [ 'To our parents', 'Mother Theresa', 'God' ], - [ 'pastrami', 'ham and cheese', 'peanut butter and jelly', 'tuna' ], - [ 'recycle tired, old phrases', 'ponder big, happy thoughts' ], - [ 'recycle tired, old phrases', - 'ponder big, happy thoughts', - 'sleep and dream peacefully' ], -] - -for list in lists do - puts "The list is: #{commify_series(list)}." -end - - -# @@PLEAC@@_4.3 -# (note: AFAIK Ruby doesn't allow gory change of Array length) -# grow the array by assigning nil to past the end of array -ary[new_size-1] = nil -# shrink the array by slicing it down -ary.slice!(new_size..-1) -# init the array with given size -Array.new(number_of_elems) -# assign to an element past the original end enlarges the array -ary[index_new_last_elem] = value - -def what_about_that_array(a) - print "The array now has ", a.size, " elements.\n" - # Index of last element is not really interesting in Ruby - print "Element #3 is `#{a[3]}'.\n" -end -people = %w(Crosby Stills Nash Young) -what_about_that_array(people) - - -# @@PLEAC@@_4.4 -# OO style -bad_users.each { |user| - complain(user) -} -# or, functional style -for user in bad_users - complain(user) -end - -for var in ENV.keys.sort - puts "#{var}=#{ENV[var]}" -end - -for user in all_users - disk_space = get_usage(user) - if (disk_space > MAX_QUOTA) - complain(user) - end -end - -for l in IO.popen("who").readlines - print l if l =~ /^gc/ -end - -# we can mimic the obfuscated Perl way -while fh.gets # $_ is set to the line just read - chomp # $_ has a trailing \n removed, if it had one - split.each { |w| # $_ is split on whitespace - # but $_ is not set to each chunk as in Perl - print w.reverse - } -end -# ...or use a cleaner way -for l in fh.readlines - l.chomp.split.each { |w| print w.reverse } -end - -# same drawback as in problem 1.4, we can't mutate a Numeric... -array.collect! { |v| v - 1 } - -a = [ .5, 3 ]; b = [ 0, 1 ] -for ary in [ a, b ] - ary.collect! { |v| v * 7 } -end -puts "#{a.join(' ')} #{b.join(' ')}" - -# we can mutate Strings, cool; we need a trick for the scalar -for ary in [ [ scalar ], array, hash.values ] - ary.each { |v| v.strip! 
} # String#strip rules :) -end - - -# @@PLEAC@@_4.5 -# not relevant in Ruby since we have always references -for item in array - # do somethingh with item -end - - -# @@PLEAC@@_4.6 -unique = list.uniq - -# generate a list of users logged in, removing duplicates -users = `who`.collect { |l| l =~ /(\w+)/; $1 }.sort.uniq -puts("users logged in: #{commify_series(users)}") # see 4.2 for commify_series - - -# @@PLEAC@@_4.7 -a - b -# [ 1, 1, 2, 2, 3, 3, 3, 4, 5 ] - [ 1, 2, 4 ] -> [3, 5] - - -# @@PLEAC@@_4.8 -union = a | b -intersection = a & b -difference = a - b - - -# @@PLEAC@@_4.9 -array1.concat(array2) -# if you will assign to another object, better use: -new_ary = array1 + array2 - -members = [ "Time", "Flies" ] -initiates = [ "An", "Arrow" ] -members += initiates - -members = [ "Time", "Flies" ] -initiates = [ "An", "Arrow" ] -members[2,0] = [ "Like", initiates ].flatten - -members[0] = "Fruit" -members[3,2] = "A", "Banana" - - -# @@PLEAC@@_4.10 -reversed = ary.reverse - -ary.reverse_each { |e| - # do something with e -} - -descending = ary.sort.reverse -descending = ary.sort { |a,b| b <=> a } - - -# @@PLEAC@@_4.11 -# remove n elements from front of ary (shift n) -front = ary.slice!(0, n) - -# remove n elements from the end of ary (pop n) -end_ = ary.slice!(-n .. -1) - -# let's extend the Array class, to make that useful -class Array - def shift2() - slice!(0 .. 1) # more symetric with pop2... - end - def pop2() - slice!(-2 .. -1) - end -end - -friends = %w(Peter Paul Mary Jim Tim) -this, that = friends.shift2 - -beverages = %w(Dew Jolt Cola Sprite Fresca) -pair = beverages.pop2 - - -# @@PLEAC@@_4.12 -# use Enumerable#detect (or the synonym Enumerable#find) -highest_eng = employees.detect { |emp| emp.category == 'engineer' } - - -# @@PLEAC@@_4.13 -# use Enumerable#select (or the synonym Enumerable#find_all) -bigs = nums.select { |i| i > 1_000_000 } -pigs = users.keys.select { |k| users[k] > 1e7 } - -matching = `who`.select { |u| u =~ /^gnat / } - -engineers = employees.select { |e| e.position == 'Engineer' } - -secondary_assistance = applicants.select { |a| - a.income >= 26_000 && a.income < 30_000 -} - - -# @@PLEAC@@_4.14 -# normally you would have an array of Numeric (Float or -# Fixnum or Bignum), so you would use: -sorted = unsorted.sort -# if you have strings representing Integers or Floats -# you may specify another sort method: -sorted = unsorted.sort { |a,b| a.to_f <=> b.to_f } - -# let's use the list of my own PID's -`ps ux`.split("\n")[1..-1]. - select { |i| i =~ /^#{ENV['USER']}/ }. - collect { |i| i.split[1] }. - sort { |a,b| a.to_i <=> b.to_i }.each { |i| puts i } -puts "Select a process ID to kill:" -pid = gets.chomp -raise "Exiting ... \n" unless pid && pid =~ /^\d+$/ -Process.kill('TERM', pid.to_i) -sleep 2 -Process.kill('KILL', pid.to_i) - -descending = unsorted.sort { |a,b| b.to_f <=> a.to_f } - - -# @@PLEAC@@_4.15 -ordered = unordered.sort { |a,b| compare(a,b) } - -precomputed = unordered.collect { |e| [compute, e] } -ordered_precomputed = precomputed.sort { |a,b| a[0] <=> b[0] } -ordered = ordered_precomputed.collect { |e| e[1] } - -ordered = unordered.collect { |e| [compute, e] }. - sort { |a,b| a[0] <=> b[0] }. - collect { |e| e[1] } - -for employee in employees.sort { |a,b| a.name <=> b.name } - print employee.name, " earns \$ ", employee.salary, "\n" -end - -# Beware! `0' is true in Ruby. -# For chaining comparisons, you may use Numeric#nonzero?, which -# returns num if num is not zero, nil otherwise -sorted = employees.sort { |a,b| (a.name <=> b.name).nonzero? 
|| b.age <=> a.age } - -users = [] -# getpwent is not wrapped in Ruby... let's fallback -IO.readlines('/etc/passwd').each { |u| users << u.split(':') } -users.sort! { |a,b| a[0] <=> b[0] } -for user in users - puts user[0] -end - -sorted = names.sort { |a,b| a[1, 1] <=> b[1, 1] } -sorted = strings.sort { |a,b| a.length <=> b.length } - -# let's show only the compact version -ordered = strings.collect { |e| [e.length, e] }. - sort { |a,b| a[0] <=> b[0] }. - collect { |e| e[1] } - -ordered = strings.collect { |e| [/\d+/.match(e)[0].to_i, e] }. - sort { |a,b| a[0] <=> b[0] }. - collect { |e| e[1] } - -print `cat /etc/passwd`.collect { |e| [e, e.split(':').indexes(3,2,0)].flatten }. - sort { |a,b| (a[1] <=> b[1]).nonzero? || (a[2] <=> b[2]).nonzero? || a[3] <=> b[3] }. - collect { |e| e[0] } - - -# @@PLEAC@@_4.16 -circular.unshift(circular.pop) # the last shall be first -circular.push(circular.shift) # and vice versa - -def grab_and_rotate(l) - l.push(ret = l.shift) - ret -end - -processes = [1, 2, 3, 4, 5] -while (1) - process = grab_and_rotate(processes) - puts "Handling process #{process}" - sleep 1 -end - - -# @@PLEAC@@_4.17 -def fisher_yates_shuffle(a) - (a.size-1).downto(1) { |i| - j = rand(i+1) - a[i], a[j] = a[j], a[i] if i != j - } -end - -def naive_shuffle(a) - for i in 0...a.size - j = rand(a.size) - a[i], a[j] = a[j], a[i] - end -end - - diff --git a/tests/examplefiles/postgresql_test.txt b/tests/examplefiles/postgresql_test.txt deleted file mode 100644 index 28db5ee3..00000000 --- a/tests/examplefiles/postgresql_test.txt +++ /dev/null @@ -1,81 +0,0 @@ -CREATE OR REPLACE FUNCTION something() RETURNS int4 AS -$x$ -BEGIN - RETURN 42; -END -$x$ -LANGUAGE 'plpgsql'; - -CREATE FUNCTION pymax (a integer, b integer) - RETURNS integer -AS $$ - if a > b: - return a - return b -$$ language plpythonu; - -CREATE FUNCTION nested_lexers (a integer, b integer) -$function$ -BEGIN - SELECT ($1 ~ $q$[\t\r\n\v\\]$q$); -END; -$function$ -LANGUAGE sql; - -CREATE OR REPLACE FUNCTION measurement_insert_trigger() -RETURNS TRIGGER AS $$ -BEGIN - <> - INSERT INTO measurement_y2008m01 VALUES (NEW.*); - RETURN NULL; -END; -$$ -LANGUAGE plpgsql; - --- As returned by pg_dump -CREATE FUNCTION test_function() RETURNS integer - LANGUAGE plpgsql STABLE STRICT - AS $$ -begin - return 42; -end -$$; - --- Unicode names and strings -SELECT U&'\0441\043B\043E\043D' -FROM U&"\0441\043B\043E\043D"; - --- Escapes -SELECT E'1\n2\n3'; - --- DO example from postgresql documentation -/* - * PostgreSQL is Copyright © 1996-2016 by the PostgreSQL Global Development Group. - * - * Postgres95 is Copyright © 1994-5 by the Regents of the University of California. - * - * Permission to use, copy, modify, and distribute this software and its - * documentation for any purpose, without fee, and without a written agreement - * is hereby granted, provided that the above copyright notice and this paragraph - * and the following two paragraphs appear in all copies. - * - * IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR - * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING - * LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, - * EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, - * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - * A PARTICULAR PURPOSE. 
THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS-IS" BASIS, - * AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, - * SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. - */ -DO $$DECLARE r record; -BEGIN - FOR r IN SELECT table_schema, table_name FROM information_schema.tables - WHERE table_type = 'VIEW' AND table_schema = 'public' - LOOP - EXECUTE 'GRANT ALL ON ' || quote_ident(r.table_schema) || '.' || quote_ident(r.table_name) || ' TO webuser'; - END LOOP; -END$$; diff --git a/tests/examplefiles/pppoe.applescript b/tests/examplefiles/pppoe.applescript deleted file mode 100644 index 4cb380e5..00000000 --- a/tests/examplefiles/pppoe.applescript +++ /dev/null @@ -1,10 +0,0 @@ -tell application "System Events" - tell network preferences - tell current location - set aPPPoEService to a reference to (first service whose kind is 10) - if exists aPPPoEService then - connect aPPPoEService - end if - end tell - end tell -end tell diff --git a/tests/examplefiles/psql_session.txt b/tests/examplefiles/psql_session.txt deleted file mode 100644 index 7096072b..00000000 --- a/tests/examplefiles/psql_session.txt +++ /dev/null @@ -1,122 +0,0 @@ -regression=# select foo; -ERROR: column "foo" does not exist -CONTEXT: PL/pgSQL function "test1" while casting return value to function's return type -LINE 1: select foo; - ^ -regression=# \q - -peter@localhost testdb=> \a \t \x -Output format is aligned. -Tuples only is off. -Expanded display is on. - -regression=# select '\x'; -WARNING: nonstandard use of escape in a string literal -LINE 1: select '\x'; - ^ -HINT: Use the escape string syntax for escapes, e.g., E'\r\n'. - ?column? ----------- - x -(1 row) - -regression=# select E'\x'; - -piro=> \set foo 30; -piro=> select * from test where foo <= :foo; - foo | bar ------+----- - 10 | - 20 | -(2 rows) - -testdb=> \set foo 'my_table' -testdb=> SELECT * FROM :"foo"; - -testdb=> \set content `cat my_file.txt` -testdb=> INSERT INTO my_table VALUES (:'content'); - -regression=# select ( -regression(# 1); - ?column? ----------- - 1 -(1 row) - -piro=> select ( -piro(> ' -piro'> ' || $$ -piro$> $$) -piro-> from " -piro"> foo"; -ERROR: relation " -foo" does not exist -LINE 5: from " - ^ - -testdb=> CREATE TABLE my_table ( -first integer not null default 0, -second text) ; -- end of command -CREATE TABLE - --- Table output -=# SELECT '0x10'::mpz AS "hex", '10'::mpz AS "dec", --# '010'::mpz AS oct, '0b10'::mpz AS bin; - hex | dec | oct | bin ------+-----+-----+----- - 16 | 10 | 8 | 2 -(1 row) - --- One field output -regression=# select schemaname from pg_tables limit 3; - schemaname ------------- - pg_catalog - pg_catalog - pg_catalog -(3 rows) - --- TODO: prompt in multiline comments still not handled correctly -test=> select 1 /* multiline -test*> and 2 /* and 3 */ -test*> end comment */, 2; - ?column? | ?column? -----------+---------- - 1 | 2 - -=# select 10.0, 1e-6, 1E+6; - ?column? | ?column? | ?column? 
-----------+----------+---------- - 10.0 | 0.000001 | 1000000 -(1 row) - -regression=# begin; -BEGIN -regression=# create table asdf (foo serial primary key); -NOTICE: CREATE TABLE will create implicit sequence "asdf_foo_seq" for serial column "asdf.foo" -NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "asdf_pkey" for table "asdf" -CREATE TABLE -regression=# insert into asdf values (10) returning foo; - foo ------ - 10 -(1 row) - -INSERT 0 1 -regression=# ROLLBACK ; -ROLLBACK - -=> EXPLAIN SELECT * FROM tenk1 --> WHERE unique1 < 100; -- Don't take -> in the plan as a prompt - - QUERY PLAN ------------------------------------------------------------------------------- - Bitmap Heap Scan on tenk1 (cost=2.37..232.35 rows=106 width=244) - Recheck Cond: (unique1 < 100) - -> Bitmap Index Scan on tenk1_unique1 (cost=0.00..2.37 rows=106 width=0) - Index Cond: (unique1 < 100) - - --- don't swallow the end of a malformed line -test=> select 1, -'this line must be emitted' diff --git a/tests/examplefiles/py3_test.txt b/tests/examplefiles/py3_test.txt deleted file mode 100644 index 21fea754..00000000 --- a/tests/examplefiles/py3_test.txt +++ /dev/null @@ -1,2 +0,0 @@ -class Käse: - pass diff --git a/tests/examplefiles/py3tb_test.py3tb b/tests/examplefiles/py3tb_test.py3tb deleted file mode 100644 index 706a540f..00000000 --- a/tests/examplefiles/py3tb_test.py3tb +++ /dev/null @@ -1,4 +0,0 @@ - File "", line 1 - 1+ - ^ -SyntaxError: invalid syntax diff --git a/tests/examplefiles/pycon_ctrlc_traceback b/tests/examplefiles/pycon_ctrlc_traceback deleted file mode 100644 index 4998fd9c..00000000 --- a/tests/examplefiles/pycon_ctrlc_traceback +++ /dev/null @@ -1,118 +0,0 @@ -x = r""" ->>> import os ->>> print os - ->>> for x in range(10): -... y = x + 2 -... print(x) -... if x > 5: -... raise Exception -... -0 -1 -2 -3 -4 -5 -6 -Traceback (most recent call last): - File "", line 5, in -Exception ->>> ->>> while True: -... pass -... -^CTraceback (most recent call last): - File "", line 1, in -KeyboardInterrupt - ->>> class A(Exception):pass -... ->>> class B(Exception):pass -... ->>> try: -... try: -... raise A('first') -... finally: -... raise B('second') -... except A as c: -... print(c) -... -Traceback (most recent call last): - File "", line 3, in -__main__.A: first - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File "", line 5, in -__main__.B: second - ->>> x = - File "", line 1 - x = - ^ -SyntaxError: invalid syntax ->>> - ->>> x = 3 ->>> with 5 as y: -... print(x + y) -... -8 - -# TODO -#>>> raise ValueError('multi\n line\ndetail') -#Traceback (most recent call last): -#........ -#ValueError: multi -# line -#detail - ->>> raise ValueError('multi\n line\ndetail') -Traceback (most recent call last): - .123 -ValueError: multi - line -detail - ->>> raise ValueError('multi\n line\ndetail') -Traceback (most recent call last): - ... -ValueError: multi - line -detail - ->>> raise ValueError('multi\n line\ndetail') -Traceback (most recent call last): - .... -ValueError: multi - line -detail - ->>> raise ValueError('multi\n line\ndetail') -Traceback (most recent call last): - .... -ValueError: multi - line -detail - ->>> raise ValueError('multi\n line\ndetail') -Traceback (most recent call last): - ... 
-ValueError: multi - line -detail - ->>> raise Exception -Traceback (most recent call last): - File "", line 1, in -Exception ->>> import somemodule ->>> somemodule.blah() -Traceback (most recent call last): - File "", line 1, in - File "/path/to/stuff/somemodule/blah.py", line 658, in blah - raise Exception('Hi.') -Exception: Hi. - diff --git a/tests/examplefiles/pycon_test.pycon b/tests/examplefiles/pycon_test.pycon deleted file mode 100644 index 9c4fc3d3..00000000 --- a/tests/examplefiles/pycon_test.pycon +++ /dev/null @@ -1,17 +0,0 @@ ->>> : - File "", line 1 - : - ^ -SyntaxError: invalid syntax ->>> -KeyboardInterrupt ->>> - ->>> 1/0 -Traceback (most recent call last): - ... -ZeroDivisionError - ->>> 1/0 # this used to swallow the traceback -Traceback (most recent call last): - ... diff --git a/tests/examplefiles/pytb_test2.pytb b/tests/examplefiles/pytb_test2.pytb deleted file mode 100644 index c4d20339..00000000 --- a/tests/examplefiles/pytb_test2.pytb +++ /dev/null @@ -1,2 +0,0 @@ - File "temp.py", line 1 -SyntaxError: Non-ASCII character '\xc3' in file temp.py on line 1, but no encoding declared; see http://www.python.org/peps/pep-0263.html for details diff --git a/tests/examplefiles/pytb_test3.pytb b/tests/examplefiles/pytb_test3.pytb deleted file mode 100644 index 6947c1ef..00000000 --- a/tests/examplefiles/pytb_test3.pytb +++ /dev/null @@ -1,4 +0,0 @@ ->>> 3/"3" -Traceback (most recent call last): - File "", line 1, in -TypeError: unsupported operand type(s) for /: 'int' and 'str' diff --git a/tests/examplefiles/python25-bsd.mak b/tests/examplefiles/python25-bsd.mak deleted file mode 100644 index 51c25967..00000000 --- a/tests/examplefiles/python25-bsd.mak +++ /dev/null @@ -1,234 +0,0 @@ -# New ports collection makefile for: python25 -# Date created: 3 July 2003 -# Whom: Hye-Shik Chang -# -# $FreeBSD: ports/lang/python25/Makefile,v 1.145 2007/10/03 23:22:04 edwin Exp $ - -PORTNAME= python25 -PORTVERSION= 2.5.1 -CATEGORIES= lang python ipv6 -MASTER_SITES= ${PYTHON_MASTER_SITES} -MASTER_SITE_SUBDIR= ${PYTHON_MASTER_SITE_SUBDIR} -DISTFILES= ${PYTHON_DISTFILE} - -MAINTAINER= python@FreeBSD.org -COMMENT?= An interpreted object-oriented programming language - -DIST_SUBDIR= python -WRKSRC= ${PYTHON_WRKSRC}/portbld.static -PATCH_WRKSRC= ${PYTHON_WRKSRC} -GNU_CONFIGURE= yes -CONFIGURE_TARGET= --build=${MACHINE_ARCH}-portbld-freebsd${OSREL} -CONFIGURE_SCRIPT= ../configure # must be relative -CONFIGURE_ENV= OPT="${CFLAGS}" SVNVERSION="echo freebsd" -MAKE_ENV= VPATH="${PYTHON_WRKSRC}" -INSTALLS_SHLIB= yes -INSTALL_TARGET= altinstall -MAN1= ${PYTHON_VERSION}.1 - -USE_PYTHON= yes -PYTHON_VERSION= python2.5 -PYTHON_NO_DEPENDS= yes - -SHARED_WRKSRC= ${PYTHON_WRKSRC}/portbld.shared -PLIST= ${WRKDIR}/PLIST -PLIST_TEMPLATE?=${PKGDIR}/pkg-plist -PLIST_SUB= PYVER=${PYTHON_VERSION:S/python//} \ - PYVER_WITHPAT=${PORTVERSION:S/.c/c/} -DEMODIR= ${PREFIX}/share/examples/${PYTHON_VERSION} -TOOLSDIR= ${PREFIX}/share/${PYTHON_VERSION} - -BIN_SCRIPTS= idle pydoc python python-shared smtpd.py python-config \ - python-shared-config -BINLINKS_SUB= -e 's,smtpd,smtpd${PYTHON_VER},' \ - -e 's,(idle|pydoc|python-shared|python),\1${PYTHON_VER},' - -OPTIONS= THREADS "Enable thread support" on \ - HUGE_STACK_SIZE "Use a larger thread stack" off \ - UCS4 "Use UCS4 for unicode support" on \ - PYMALLOC "Use python's internal malloc" on \ - IPV6 "Enable IPv6 support" on \ - FPECTL "Enable floating point exception handling" off - -.include - -.if ${PYTHON_VERSION} == ${PYTHON_DEFAULT_VERSION} -MLINKS= ${PYTHON_VERSION}.1 
python.1 -PLIST_SUB+= IF_DEFAULT="" -.else -PLIST_SUB+= IF_DEFAULT="@comment " -.endif - -# workaround for a bug in base curses.h. -CFLAGS+= -D__wchar_t=wchar_t - -.if !defined(WITHOUT_THREADS) -CONFIGURE_ARGS+= --with-threads -CFLAGS+= ${PTHREAD_CFLAGS} -.if defined(WITHOUT_HUGE_STACK_SIZE) -CFLAGS+= -DTHREAD_STACK_SIZE=0x20000 -.else -CFLAGS+= -DTHREAD_STACK_SIZE=0x100000 -.endif # defined(WITHOUT_HUGE_STACK_SIZE) -CONFIGURE_ENV+= LDFLAGS="${PTHREAD_LIBS} ${LDFLAGS}" -.else -CONFIGURE_ARGS+= --without-threads -.if defined(LDFLAGS) -CONFIGURE_ENV+= LDFLAGS="${LDFLAGS}" -.endif # defined(LDFLAGS) -.endif # !defined(WITHOUT_THREADS) - -.if !defined(WITHOUT_UCS4) && !defined(WITH_UCS2) -CONFIGURE_ARGS+= --enable-unicode=ucs4 -.endif - -.if defined(WITHOUT_PYMALLOC) -CONFIGURE_ARGS+= --without-pymalloc -.endif - -.if ${ARCH} == i386 -PLIST_SUB+= X86_ONLY="" -.else -PLIST_SUB+= X86_ONLY="@comment " -.endif -.if ${ARCH} == amd64 || ${ARCH} == ia64 || ${ARCH} == sparc64 || ${ARCH} == alpha -PLIST_SUB+= 32BIT_ONLY="@comment " -.else -PLIST_SUB+= 32BIT_ONLY="" -.endif -.if ${ARCH} == sparc64 -CFLAGS+= -DPYTHON_DEFAULT_RECURSION_LIMIT=900 -.endif - -.if !exists(/usr/bin/ypcat) # the world with NO_NIS -PLIST_SUB+= NO_NIS="@comment " -.else -PLIST_SUB+= NO_NIS="" -.endif - -.if !defined(WITHOUT_IPV6) -CONFIGURE_ARGS+= --enable-ipv6 -.else -CONFIGURE_ARGS+= --disable-ipv6 -.endif - -.if defined(WITH_FPECTL) -CONFIGURE_ARGS+= --with-fpectl -.endif - -.if ${OSVERSION} >= 700000 -PLATFORMS=plat-freebsd4 plat-freebsd5 plat-freebsd6 -.elif ${OSVERSION} >= 600000 -PLATFORMS=plat-freebsd4 plat-freebsd5 plat-freebsd7 -.else -PLATFORMS=plat-freebsd4 plat-freebsd6 plat-freebsd7 -.endif - -pre-patch: - ${MKDIR} ${WRKSRC} ${SHARED_WRKSRC}/Modules - ${SED} -e '1s,^.*$$,#!${PREFIX}/bin/${PYTHON_VERSION},' \ - ${PATCH_WRKSRC}/Tools/scripts/pydoc > ${WRKDIR}/pydoc2.5 - ${SED} -e '1s,^.*$$,#!${PREFIX}/bin/${PYTHON_VERSION},' \ - ${PATCH_WRKSRC}/Tools/scripts/idle > ${WRKDIR}/idle2.5 - ${SED} -e '1s,^.*$$,#!${PREFIX}/bin/${PYTHON_VERSION},' \ - ${PATCH_WRKSRC}/Lib/smtpd.py > ${WRKDIR}/smtpd2.5.py - ${REINPLACE_CMD} -e \ - 's,/usr/doc/python-docs-,${PREFIX}/share/doc/python,g' \ - ${PATCH_WRKSRC}/Lib/pydoc.py - ${REINPLACE_CMD} -e \ - 's|^\( *prefixes = .*\)\]$$|\1, "${X11BASE}"]|g' \ - ${PATCH_WRKSRC}/Lib/site.py - ${REINPLACE_CMD} -e \ - 's|^ \(..ASDLGEN.*\)$$| ${TRUE}|g' \ - ${PATCH_WRKSRC}/Makefile.pre.in - - ${REINPLACE_CMD} -e \ - 's|*\(..INSTALL_SCRIPT.*\)python-config$$|#port \1|' \ - ${PATCH_WRKSRC}/Makefile.pre.in - - ${SED} -e 's|^#!.*|#!${PREFIX}/bin/${PYTHON_VERSION}|' \ - ${PATCH_WRKSRC}/Misc/python-config.in > ${WRKDIR}/${PYTHON_VERSION}-config - ${SED} -e 's|^#!.*|#!${PREFIX}/bin/${PYTHON_VERSION:S/thon/thon-shared/}|' \ - ${PATCH_WRKSRC}/Misc/python-config.in > ${WRKDIR}/${PYTHON_VERSION:S/thon/thon-shared/}-config - -.if defined(WITH_FPECTL) && ${ARCH} == i386 - ${MKDIR} ${WRKSRC}/Modules - ${ECHO} "fpectl fpectlmodule.c" >> ${WRKSRC}/Modules/Setup.dist -.endif - -post-configure: - ${TAR} -C ${WRKSRC} -cf - . | ${TAR} -C ${SHARED_WRKSRC} -xf - - ${LN} -sf ${PYTHON_WRKSRC}/Lib ${WRKSRC}/Lib - ${SED} -e 's,^\(LDLIBRARY=\).*$$,\1libpython$$(VERSION).so,' \ - -e 's,^\(BLDLIBRARY=\).*$$,\1-L. 
-lpython$$(VERSION),' \ - -e 's,^\(CFLAGSFORSHARED=\).*$$,\1$$(CCSHARED),' \ - -e 's,^\(Makefile Modules/config.c:.*\)Makefile.pre,\1,' \ - -e 's,^\(.(BUILDPYTHON)\: .*\).(LIBRARY),\1,' \ - -e 's,^\(.(BUILDPYTHON):.*\).(LIBRARY),\1,' \ - ${WRKSRC}/Makefile > ${SHARED_WRKSRC}/Makefile - -pre-build: - cd ${SHARED_WRKSRC}; \ - ${SETENV} ${MAKE_ENV} ${MAKE} lib${PYTHON_VERSION}.so python; \ - ${LN} -f lib${PYTHON_VERSION}.so lib${PYTHON_VERSION}.so.1; \ - ${LN} -f python ${PYTHON_VERSION:S/thon/thon-shared/} - -pre-su-install: -.for platform in ${PLATFORMS} - ${MKDIR} ${PYTHONPREFIX_LIBDIR}/${platform} -.for file in IN.py regen - ${INSTALL_DATA} ${WRKSRC}/Lib/${platform}/${file} \ - ${PYTHONPREFIX_LIBDIR}/${platform}/ -.endfor -.endfor - -pre-install: - ${CAT} ${PLIST_TEMPLATE} | ${AWK} '{ print $$0; } \ - /LIBDIR.*\.py$$/ && !/\/bad/ { print $$0 "o"; print $$0 "c"; }' > ${PLIST} - - @# if openssl 0.9.8 is detected, _sha{256,512} module won't be installed - ([ -f ${WRKSRC}/.without_own_sha ] && \ - ${GREP} -v 'lib-dynload/_sha' ${PLIST} > ${PLIST}.tmp && \ - ${CAT} ${PLIST}.tmp > ${PLIST}) || ${TRUE} - -post-install: - @# install config providers - ${INSTALL_SCRIPT} ${WRKDIR}/${PYTHON_VERSION}-config ${PREFIX}/bin - ${INSTALL_SCRIPT} ${WRKDIR}/${PYTHON_VERSION:S/thon/thon-shared/}-config ${PREFIX}/bin - - @# shared version of executable and library - ${INSTALL_PROGRAM} ${SHARED_WRKSRC}/lib${PYTHON_VERSION}.so.1 \ - ${PREFIX}/lib - cd ${PREFIX}/lib; ${LN} -sf lib${PYTHON_VERSION}.so.1 \ - lib${PYTHON_VERSION}.so - ${LN} -sf ${PREFIX}/lib/lib${PYTHON_VERSION}.so ${PYTHONPREFIX_LIBDIR}/config - ${INSTALL_PROGRAM} \ - ${SHARED_WRKSRC}/${PYTHON_VERSION:S/thon/thon-shared/} \ - ${PREFIX}/bin - - @# additional files installing by ports - ${INSTALL_SCRIPT} ${WRKDIR}/pydoc2.5 ${WRKDIR}/idle2.5 \ - ${WRKDIR}/smtpd2.5.py ${PREFIX}/bin - @${MKDIR} ${MANPREFIX}/man/man1 - ${INSTALL_MAN} ${PYTHON_WRKSRC}/Misc/python.man \ - ${MANPREFIX}/man/man1/${PYTHON_VERSION}.1 - -.if ${PYTHON_VERSION} == ${PYTHON_DEFAULT_VERSION} - for f in ${BIN_SCRIPTS}; do \ - TARGET=`${ECHO_CMD} $$f | ${SED} -E ${BINLINKS_SUB}`; \ - cd ${PREFIX}/bin && ${LN} -f $$TARGET $$f; \ - done -.endif - -.if !defined(NOPORTDOCS) - @${MKDIR} ${TOOLSDIR} - @cd ${PYTHON_WRKSRC}; ${TAR} -cf - Tools | \ - (cd ${TOOLSDIR}; ${TAR} -xf -) - @${MKDIR} ${DEMODIR} - @cd ${PYTHON_WRKSRC}/Demo; ${TAR} -cf - * | \ - (cd ${DEMODIR}; ${TAR} -xf -) -.endif - - @${CAT} ${PKGMESSAGE} - -.include diff --git a/tests/examplefiles/qbasic_example b/tests/examplefiles/qbasic_example deleted file mode 100644 index 27041af6..00000000 --- a/tests/examplefiles/qbasic_example +++ /dev/null @@ -1,2 +0,0 @@ -10 print RIGHT$("hi there", 5) -20 goto 10 diff --git a/tests/examplefiles/qsort.prolog b/tests/examplefiles/qsort.prolog deleted file mode 100644 index d78de6f1..00000000 --- a/tests/examplefiles/qsort.prolog +++ /dev/null @@ -1,13 +0,0 @@ -partition([], _, [], []). -partition([X|Xs], Pivot, Smalls, Bigs) :- - ( X @< Pivot -> - Smalls = [X|Rest], - partition(Xs, Pivot, Rest, Bigs) - ; Bigs = [X|Rest], - partition(Xs, Pivot, Smalls, Rest) - ). - -quicksort([]) --> []. -quicksort([X|Xs]) --> - { partition(Xs, X, Smaller, Bigger) }, - quicksort(Smaller), [X], quicksort(Bigger). 
diff --git a/tests/examplefiles/r-console-transcript.Rout b/tests/examplefiles/r-console-transcript.Rout deleted file mode 100644 index d0cf34b9..00000000 --- a/tests/examplefiles/r-console-transcript.Rout +++ /dev/null @@ -1,38 +0,0 @@ - -R version 2.9.2 (2009-08-24) -Copyright (C) 2009 The R Foundation for Statistical Computing -ISBN 3-900051-07-0 - -R is free software and comes with ABSOLUTELY NO WARRANTY. -You are welcome to redistribute it under certain conditions. -Type 'license()' or 'licence()' for distribution details. - - Natural language support but running in an English locale - -R is a collaborative project with many contributors. -Type 'contributors()' for more information and -'citation()' on how to cite R or R packages in publications. - -Type 'demo()' for some demos, 'help()' for on-line help, or -'help.start()' for an HTML browser interface to help. -Type 'q()' to quit R. - -[R.app GUI 1.29 (5464) i386-apple-darwin8.11.1] - -> x <- function {} -Error: syntax error -> x <- function() {} -> x <- function() { -+ cat("hello") -+ cat("world") -+ } -> x -function() { -cat("hello") -cat("world") -} -> x() -helloworld -> 2 + 2 -[1] 4 -> \ No newline at end of file diff --git a/tests/examplefiles/r6rs-comments.scm b/tests/examplefiles/r6rs-comments.scm deleted file mode 100644 index cd5c3636..00000000 --- a/tests/examplefiles/r6rs-comments.scm +++ /dev/null @@ -1,23 +0,0 @@ -#!r6rs - -#| - - The FACT procedure computes the factorial - - of a non-negative integer. - -|# - -(define fact - - (lambda (n) - - ;; base case - - (if (= n 0) - - #;(= n 1) - - 1 ; identity of * - - (* n (fact (- n 1)))))) diff --git a/tests/examplefiles/ragel-cpp_rlscan b/tests/examplefiles/ragel-cpp_rlscan deleted file mode 100644 index 4b146329..00000000 --- a/tests/examplefiles/ragel-cpp_rlscan +++ /dev/null @@ -1,280 +0,0 @@ -/* - * Lexes Ragel input files. - * - * @LANG: c++ - * - * Test works with split code gen. - */ - -#include -#include -#include -#include - -using namespace std; - -void escapeXML( const char *data ) -{ - while ( *data != 0 ) { - switch ( *data ) { - case '<': cout << "<"; break; - case '>': cout << ">"; break; - case '&': cout << "&"; break; - default: cout << *data; break; - } - data += 1; - } -} - -void escapeXML( char c ) -{ - switch ( c ) { - case '<': cout << "<"; break; - case '>': cout << ">"; break; - case '&': cout << "&"; break; - default: cout << c; break; - } -} - -void escapeXML( const char *data, int len ) -{ - for ( const char *end = data + len; data != end; data++ ) { - switch ( *data ) { - case '<': cout << "<"; break; - case '>': cout << ">"; break; - case '&': cout << "&"; break; - default: cout << *data; break; - } - } -} - -inline void write( const char *data ) -{ - cout << data; -} - -inline void write( char c ) -{ - cout << c; -} - -inline void write( const char *data, int len ) -{ - cout.write( data, len ); -} - - -%%{ - machine RagelScan; - - word = [a-zA-Z_][a-zA-Z_0-9]*; - integer = [0-9]+; - hex = '0x' [0-9a-fA-F] [0-9a-fA-F]*; - - default = ^0; - EOF = 0; - - # Handles comments in outside code and inline blocks. 
- c_comment := - ( default* :>> '*/' ) - ${ escapeXML( fc ); } - @{ fret; }; - - action emit { - escapeXML( ts, te-ts ); - } - - # - # Inline action code - # - - ilscan := |* - - "'" ( [^'\\] | /\\./ )* "'" => emit; - '"' ( [^"\\] | /\\./ )* '"' => emit; - '/*' { - write( "/*" ); - fcall c_comment; - }; - '//' [^\n]* '\n' => emit; - - '{' { - write( '{' ); - inline_depth += 1; - }; - - '}' { - write( '}' ); - /* If dropping down to the last } then return - * to ragel code. */ - if ( --inline_depth == 0 ) { - write( "\n" ); - fgoto rlscan; - } - }; - - default => { escapeXML( *ts ); }; - *|; - - # - # Ragel Tokens - # - - rlscan := |* - '}%%' { - if ( !single_line ) { - write( "\n" ); - fgoto main; - } - }; - - '\n' { - if ( single_line ) { - write( "\n" ); - fgoto main; - } - }; - - # Word - word { - write( "" ); - write( ts, te-ts ); - write( "\n" ); - }; - - # Decimal integer. - integer { - write( "" ); - write( ts, te-ts ); - write( "\n" ); - }; - - # Hexidecimal integer. - hex { - write( "" ); - write( ts, te-ts ); - write( "\n" ); - }; - - # Consume comments. - '#' [^\n]* '\n'; - - # Single literal string. - "'" ( [^'\\] | /\\./ )* "'" { - write( "" ); - escapeXML( ts, te-ts ); - write( "\n" ); - }; - - # Double literal string. - '"' ( [^"\\] | /\\./ )* '"' { - write( "" ); - escapeXML( ts, te-ts ); - write( "\n" ); - }; - - # Or literal. - '[' ( [^\]\\] | /\\./ )* ']' { - write( "" ); - escapeXML( ts, te-ts ); - write( "\n" ); - }; - - # Regex Literal. - '/' ( [^/\\] | /\\./ ) * '/' { - write( "" ); - escapeXML( ts, te-ts ); - write( "\n" ); - }; - - # Open an inline block - '{' { - inline_depth = 1; - write( "{" ); - fgoto ilscan; - }; - - punct { - write( "" ); - escapeXML( fc ); - write( "\n" ); - }; - - default; - *|; - - # - # Outside code. - # - - main := |* - - "'" ( [^'\\] | /\\./ )* "'" => emit; - '"' ( [^"\\] | /\\./ )* '"' => emit; - - '/*' { - escapeXML( ts, te-ts ); - fcall c_comment; - }; - - '//' [^\n]* '\n' => emit; - - '%%{' { - write( "
    \n" ); - single_line = false; - fgoto rlscan; - }; - - '%%' { - write( "
    \n" ); - single_line = true; - fgoto rlscan; - }; - - default { - escapeXML( *ts ); - }; - - # EOF. - EOF; - *|; -}%% - -%% write data nofinal; - -void test( const char *data ) -{ - std::ios::sync_with_stdio(false); - - int cs, act; - const char *ts, *te; - int stack[1], top; - - bool single_line = false; - int inline_depth = 0; - - %% write init; - - /* Read in a block. */ - const char *p = data; - const char *pe = data + strlen( data ); - const char *eof = pe; - %% write exec; - - if ( cs == RagelScan_error ) { - /* Machine failed before finding a token. */ - cerr << "PARSE ERROR" << endl; - exit(1); - } -} - -#define BUFSIZE 2048 - -int main() -{ - std::ios::sync_with_stdio(false); - - test("hi %%{ /'}%%'/ { /*{*/ {} } + '\\'' }%%there\n"); - - return 0; -} diff --git a/tests/examplefiles/ragel-cpp_snippet b/tests/examplefiles/ragel-cpp_snippet deleted file mode 100644 index 203ae28b..00000000 --- a/tests/examplefiles/ragel-cpp_snippet +++ /dev/null @@ -1,2 +0,0 @@ - %% write init; - /* Read in a block. */ diff --git a/tests/examplefiles/regex.js b/tests/examplefiles/regex.js deleted file mode 100644 index 7790cb00..00000000 --- a/tests/examplefiles/regex.js +++ /dev/null @@ -1,22 +0,0 @@ -// regex - -blah(/abc/); -x = /abc/; -x = /abc/.match; - -// math - -blah(1/2); //comment -x = 1 / 2 / 3; -x = 1/1/.1; - -// broken - -x=/1/; -x=1/a/g; -x=a/a/g; - -// real-world - -var x = 1/(1+Math.sqrt(sum)); // convert to number between 1-0 -return Math.round((num / den) * 100)/100; diff --git a/tests/examplefiles/resourcebundle_demo b/tests/examplefiles/resourcebundle_demo deleted file mode 100644 index e1daa56a..00000000 --- a/tests/examplefiles/resourcebundle_demo +++ /dev/null @@ -1,9 +0,0 @@ -root:table { - usage:string { "Usage: genrb [Options] files" } - version:int { 122 } - errorcodes:array { - :string { "Invalid argument" } - :string { "File not found" } - :string { "\x00 \r \t \n \u1234" } - } -} diff --git a/tests/examplefiles/reversi.lsp b/tests/examplefiles/reversi.lsp deleted file mode 100644 index fa9a333c..00000000 --- a/tests/examplefiles/reversi.lsp +++ /dev/null @@ -1,427 +0,0 @@ -#!/usr/bin/env newlisp -;; @module reversi.lsp -;; @description a simple version of Reversi: you as white against newLISP as black -;; @version 0.1 alpha August 2007 -;; @author cormullion -;; -;; 2008-10-08 21:46:54 -;; updated for newLISP version 10. (changed nth-set to setf) -;; this now does not work with newLISP version 9! -;; -;; This is my first attempt at writing a simple application using newLISP-GS. -;; The game algorithms are basically by -;; Peter Norvig http://norvig.com/paip/othello.lisp -;; and all I've done is translate to newLISP and add the interface... -;; -;; To-Do: work out how to handle the end of the game properly... 
-;; To-Do: complete newlispdoc for the functions - -(constant 'empty 0) -(constant 'black 1) -(constant 'white 2) -(constant 'outer 3) ; squares outside the 8x8 board - -(set '*board* '()) ; the master board is a 100 element list -(set '*moves* '()) ; list of moves made - -; these are the 8 different directions from a square on the board - -(set 'all-directions '(-11 -10 -9 -1 1 9 10 11)) - -; return a list of all the playable squares (the 8 by 8 grid inside the 10by10 - -(define (all-squares) - (local (result) - (for (square 11 88) - (if (<= 1 (mod square 10) 8) - (push square result -1))) -result)) - -; make a board - -(define (make-board) - (set '*board* (dup outer 100)) - (dolist (s (all-squares)) - (setf (*board* s) empty))) - -; for testing and working at a terminal - -(define (print-board) - (print { }) - (for (c 1 8) - (print c)) - (set 'c 0) - (for (i 0 99) - (cond - ((= (*board* i) 0) (print {.})) - ((= (*board* i) 1) (print {b})) - ((= (*board* i) 2) (print {w}))) - (if (and (<= i 88) (= (mod (+ i 1) 10) 0)) ; newline - (print "\n" (inc c)))) - (println "\n")) - -; the initial starting pattern - -(define (initial-board) - (make-board) - (setf (*board* 44) white) - (setf (*board* 55) white) - (setf (*board* 45) black) - (setf (*board* 54) black)) - -(define (opponent player) - (if (= player black) white black)) - -(define (player-name player) - (if (= player white) "white" "black")) - -(define (valid-move? move) - (and - (integer? move) - (<= 11 move 88) - (<= 1 (mod move 10) 8))) - -(define (empty-square? square) - (and - (valid-move? square) - (= (*board* square) empty))) - -; test whether a move is legal. The square must be empty -; and it must flip at least one of the opponent's piece - -(define (legal-move? move player) - (and - (empty-square? move) - (exists (fn (dir) (would-flip? move player dir)) all-directions))) - -; would this move by player result in any flips in the given direction? -; if so, return the number of the 'opposite' (bracketing) piece's square - -(define (would-flip? move player dir) - (let - ((c (+ move dir))) - (and - (= (*board* c) (opponent player)) - (find-bracketing-piece (+ c dir) player dir)))) - -(define (find-bracketing-piece square player dir) - ; return the square of the bracketing piece, if any - (cond - ((= (*board* square) player) square) - ((= (*board* square) (opponent player)) - (find-bracketing-piece (+ square dir) player dir)) - (true nil))) - -(define (make-flips move player dir) - (let - ((bracketer (would-flip? move player dir)) - (c (+ move dir))) - (if bracketer - (do-until (= c bracketer) - (setf (*board* c) player) - (push c *flips* -1) - (inc c dir))))) - -; make the move on the master game board, not yet visually - -(define (make-move move player) - (setf (*board* move) player) - (push move *moves* -1) - (set '*flips* '()) ; we're going to keep a record of the flips made - (dolist (dir all-directions) - (make-flips move player dir))) - -(define (next-to-play previous-player) - (let ((opp (opponent previous-player))) - (cond - ((any-legal-move? opp) opp) - ((any-legal-move? previous-player) - (println (player-name opp) " has no moves") - previous-player) - (true nil)))) - -; are there any legal moves (returns first) for this player? -(define (any-legal-move? player) - (exists (fn (move) (legal-move? move player)) - (all-squares))) - -; a list of all legal moves might be useful -(define (legal-moves player) - (let ((result '())) - (dolist (move (all-squares)) - (if (legal-move? 
move player) - (push move result))) - (unique result))) - -; define any number of strategies that can be called on to calculate -; the next computer move. This is the only one I've done... - make -; any legal move at random! - -(define (random-strategy player) - (seed (date-value)) - (apply amb (legal-moves player))) - -; get the next move using a particular strategy - -(define (get-move strategy player) - (let ((move (apply strategy (list player)))) - (cond - ((and - (valid-move? move) - (legal-move? move player)) - (make-move move player)) - (true - (println "no valid or legal move for " (player-name player) ) - nil)) - move)) - -; that's about all the game algorithms for now -; now for the interface - -(if (= ostype "Win32") - (load (string (env "PROGRAMFILES") "/newlisp/guiserver.lsp")) - (load "/usr/share/newlisp/guiserver.lsp") -) - -(gs:init) -(map set '(screen-width screen-height) (gs:get-screen)) -(set 'board-width 540) -; center on screen -(gs:frame 'Reversi (- (/ screen-width 2) (/ board-width 2)) 60 board-width 660 "Reversi") -(gs:set-border-layout 'Reversi) - -(gs:canvas 'MyCanvas 'Reversi) - (gs:set-background 'MyCanvas '(.8 .9 .7 .8)) - (gs:mouse-released 'MyCanvas 'mouse-released-action true) - -(gs:panel 'Controls) - (gs:button 'Start 'start-game "Start") - -(gs:panel 'Lower) - (gs:label 'WhiteScore "") - (gs:label 'BlackScore "") - -(gs:add-to 'Controls 'Start ) -(gs:add-to 'Lower 'WhiteScore 'BlackScore) -(gs:add-to 'Reversi 'MyCanvas "center" 'Controls "north" 'Lower "south") - -(gs:set-anti-aliasing true) -(gs:set-visible 'Reversi true) - -; size of board square, and radius/width of counter -(set 'size 60 'width 30) - -; initialize the master board - -(define (initial-board) - (make-board) - (setf (*board* 44) white) - (setf (*board* 55) white) - (setf (*board* 45) black) - (setf (*board* 54) black) -) - -; draw a graphical repesentation of the board - -(define (draw-board) - (local (x y) - (dolist (i (all-squares)) - (map set '(x y) (square-to-xy i)) - (gs:draw-rect - (string x y) - (- (* y size) width ) ; !!!!!! - (- (* x size) width ) - (* width 2) - (* width 2) - gs:white)))) - -(define (draw-first-four-pieces) - (draw-piece 44 "white") - (draw-piece 55 "white") - (draw-piece 45 "black") - (draw-piece 54 "black")) - -; this next function can mark the legal moves available to a player - -(define (show-legal-moves player) - (local (legal-move-list x y) - (set 'legal-move-list (legal-moves player)) - (dolist (m (all-squares)) - (map set '(x y) (square-to-xy m)) - (gs:draw-rect - (string x y) - (- (* y size) width ) ; !!!!!! - (- (* x size) width ) - (* width 2) - (* width 2) - (if (find m legal-move-list) gs:blue gs:white) - ) - ) - ) -) - -; convert the number of a square on the master board to coordinates - -(define (square-to-xy square) - (list (/ square 10) (mod square 10))) - -; draw one of the pieces - -(define (draw-piece square colour) - (local (x y) - (map set '(x y) (square-to-xy square)) - (cond - ((= colour "white") - (gs:fill-circle - (string x y) - (* y size) ; !!!!!!! 
y first, cos y is x ;-) - (* x size) - width - gs:white)) - - ((= colour "black") - (gs:fill-circle - (string x y) - (* y size) - (* x size) - width - gs:black)) - - ((= colour "empty") - (gs:draw-rect - (string x y) - (- (* y size) width ) - (- (* x size) width ) - (* width 2) - (* width 2) - gs:white)) - ))) - -; animate the pieces flipping - -(define (flip-piece square player) -; flip by drawing thinner and fatter ellipses -; go from full disk in opposite colour to invisible -; then from invisible to full disk in true colour - (local (x y colour) - (map set '(x y) (square-to-xy square)) - ; delete original piece - (gs:delete-tag (string x y)) - (set 'colour (if (= player 2) gs:black gs:white )) - (for (i width 1 -3) - (gs:fill-ellipse - (string x y {flip} i) - (* y size) ; y first :-) !!! - (* x size) - i - width - colour) - (sleep 20) ; this might need adjusting... - (gs:delete-tag (string x y {flip} i)) - ) - (set 'colour (if (= player 2) gs:white gs:black)) - (for (i 1 width 3) - (gs:fill-ellipse - (string x y {flip} i) - (* y size) ; :-) !!! - (* x size) - i - width - colour) - (sleep 20) - (gs:delete-tag (string x y {flip} i)) - ) - ; draw the piece again - (gs:fill-circle - (string x y) - (* y size) - (* x size) - width - colour) - ) -) - -(define (do-move move player) - (cond - ; check if the move is good ... - ((and (!= player nil) - (valid-move? move) - (legal-move? move player)) - - ; ... play it - ; make move on board - (make-move move player) - ; and on screen - (draw-piece move (player-name player)) - (gs:update) - ; do flipping stuff - - ; wait for a while - (sleep 1000) - - ; then do flipping - (dolist (f *flips*) - (flip-piece f player)) - - (inc *move-number*) - (draw-piece move (player-name player)) - (gs:update) - - ; update scores - (gs:set-text 'WhiteScore - (string "White: " (first (count (list white) *board*)))) - (gs:set-text 'BlackScore - (string "Black: " (first (count (list black) *board*)))) - ) - ; or return nil - (true - nil))) - -; the game is driven by the mouse clicks of the user -; in reply, the computer plays a black piece -; premature clicking is possible and possibly a bad thing... - -(define (mouse-released-action x y button modifiers tags) - ; extract the tag of the clicked square - (set 'move (int (string (first tags)) 0 10)) - (if (do-move move player) - (begin - (set 'player (next-to-play player)) - ; there is a training mode - legal squares are highlighted - ; you can uncomment the next line... - ; (show-legal-moves player) - (gs:update) - - ; wait for black's reply - (gs:set-cursor 'Reversi "wait") - (gs:set-text 'Start "black's move - thinking...") - ; give the illusion of Deep Thought... - (sleep 2000) - ; black's reply - ; currently only the random strategy has been defined... 
- (set 'strategy random-strategy) - (set 'move (apply strategy (list player))) - (do-move move player) - (set 'player (next-to-play player)) - ; (show-legal-moves player) ; to see black's moves - (gs:set-text 'Start "your move") - (gs:set-cursor 'Reversi "default") - (gs:update)))) - -(define (start-game) - (gs:set-text 'Start "Click a square to place a piece!") - (gs:disable 'Start) - (set 'player white)) - -(define (start) - (gs:set-text 'Start "Start") - (gs:enable 'Start) - (set '*move-number* 1 - '*flips* '()) - (initial-board) - (draw-board) - (draw-first-four-pieces)) - -(start) - -(gs:listen) \ No newline at end of file diff --git a/tests/examplefiles/rnc_example.rnc b/tests/examplefiles/rnc_example.rnc deleted file mode 100644 index a1440302..00000000 --- a/tests/examplefiles/rnc_example.rnc +++ /dev/null @@ -1,33 +0,0 @@ -# This is a sample RNC file from the tutorial for the 2003 Working Draft -# http://relaxng.org/compact-tutorial-20030326.html - -element html { - element head { - element title { text } - }, - element body { - element table { - attribute class { "addressBook" }, - element tr { - attribute class { "card" }, - element td { - attribute class { "name" }, - mixed { - element span { - attribute class { "givenName" }, - text - }?, - element span { - attribute class { "familyName" }, - text - }? - } - }, - element td { - attribute class { "email" }, - text - } - }+ - } - } -} diff --git a/tests/examplefiles/roboconf.graph b/tests/examplefiles/roboconf.graph deleted file mode 100644 index e5fdedff..00000000 --- a/tests/examplefiles/roboconf.graph +++ /dev/null @@ -1,40 +0,0 @@ -################## -# A sample graph -################## - -import some-definition.graph; -import another-definition.graph; - -VM { - installer : target; - children: deployable; -} - -facet deployable { - # nothing -} - -# Sample deployables -mysql { - insTaller: puppet; - facets: deployable; - exports: ip, port = 3306; -} - -tomcat { - installer: bash; - facets: deployable; - exports: ip; - children: web-application; -} - -facet web-application { - exports: full-path = undefined; -} - -my-war-1 { - facets: web-application; - installer: file; - exports: full-path = apps/my-war-1; # the relative path - imports: mysql.*; -} diff --git a/tests/examplefiles/roboconf.instances b/tests/examplefiles/roboconf.instances deleted file mode 100644 index c69a2ab0..00000000 --- a/tests/examplefiles/roboconf.instances +++ /dev/null @@ -1,24 +0,0 @@ - -# Deal with imports -import others.instances; - -instance of VM { - name: VM-mysql; - instance of mysql { - name: MySQL; - } -} - -instance of VM { - name: VM ; - count: 5; - - INSTANCE of tomcat { - name: Tomcat; - - instance of my-war-1 { - name: my-war-1; - full-path: apps/my-war; - } - } -} diff --git a/tests/examplefiles/robotframework_test.txt b/tests/examplefiles/robotframework_test.txt deleted file mode 100644 index 0d8179c0..00000000 --- a/tests/examplefiles/robotframework_test.txt +++ /dev/null @@ -1,40 +0,0 @@ -*** Settings *** -Documentation Simple example demonstrating syntax highlighting. 
-Library ExampleLibrary -Test Setup Keyword argument argument with ${VARIABLE} - -*** Variables *** -${VARIABLE} Variable value -@{LIST} List variable here -&{DICT} Key1=Value1 Key2=Value2 - -*** Test Cases *** -Keyword-driven example - Initialize System - Do Something - Result Should Be 42 - [Teardown] Cleanup System - -Data-driven example - [Template] Keyword - argument1 argument2 - argument ${VARIABLE} - @{LIST} - -Gherkin - Given system is initialized - When something is done - Then result should be "42" - -| Pipes | -| | [Documentation] | Also pipe separated format is supported. | -| | Log | As this example demonstrates. | - -*** Keywords *** -Result Should Be - [Arguments] ${expected} - ${actual} = Get Value - Should be Equal ${actual} ${expected} - -Then result should be "${expected}" - Result Should Be ${expected} diff --git a/tests/examplefiles/rql-queries.rql b/tests/examplefiles/rql-queries.rql deleted file mode 100644 index 1d86df3c..00000000 --- a/tests/examplefiles/rql-queries.rql +++ /dev/null @@ -1,34 +0,0 @@ -Any N, N2 where N is Note, N2 is Note, N a_faire_par P1, P1 nom 'john', N2 a_faire_par P2, P2 nom 'jane' ; -DISTINCT Any N, D, C, T, A ORDERBY D DESC LIMIT 40 where N is Note, N diem D, W is Workcase, W concerned_by N, N cost C, N text T, N author A, N diem <= today -Bookmark B WHERE B owned_by G, G eid 5; -Any X WHERE E eid 22762, NOT E is_in X, X modification_date D ORDERBY D DESC LIMIT 41; -Any A, R, SUB ORDERBY R WHERE A is "Workcase", S is Division, S concerned_by A, A subject SUB, S eid 85, A ref R; -Any D, T, L WHERE D is Document, A concerned_by D,A eid 14533, D title T, D location L; -Any N,A,B,C,D ORDERBY A DESC WHERE N is Note, W concerned_by N, W eid 14533, N diem A,N author B,N text C,N cost D; -Any X ORDERBY D DESC LIMIT 41 WHERE E eid 18134, NOT E concerned_by X, X modification_date D -DISTINCT Any N, D, C, T, A ORDERBY D ASC LIMIT 40 WHERE N is Note, N diem D, P is Person, N to_be_contacted_by G, N cost C, N text T, N author A, G login "john"; -INSERT Person X: X surname "Doe", X firstname "John"; -Workcase W where W ref "ABCD12"; -Workcase W where W ref LIKE "AB%"; -Any X WHERE X X eid 53 -Any X WHERE X Document X occurence_of F, F class C, C name 'Comics' X owned_by U, U login 'syt' X available true -Person P WHERE P work_for P, S name 'Acme', P interested_by T, T name 'training' -Note N WHERE N written_on D, D day> (today -10), N written_by P, P name 'joe' or P name 'jack' -Person P WHERE (P interested_by T, T name 'training') or (P city 'Paris') -Any N, P WHERE X is Person, X name N, X first_name P -String N, P WHERE X is Person, X name N, X first_name P -INSERT Person X: X name 'widget' -INSERT Person X, Person Y: X name 'foo', Y name 'nice', X friend Y -INSERT Person X: X name 'foo', X friend Y WHERE name 'nice' -SET X name 'bar', X first_name 'original' where X is Person X name 'foo' -SET X know Y WHERE X friend Y -DELETE Person X WHERE X name 'foo' -DELETE X friend Y WHERE X is Person, X name 'foo' -Any X WHERE X name LIKE '%lt' -Any X WHERE X name IN ( 'joe', 'jack', 'william', 'averell') -Any X, V WHERE X concerns P, P eid 42, X corrected_in V? -Any C, P WHERE C is Card, P? 
documented_by C -Point P where P abs X, P ord Y, P value X+Y -Document X where X class C, C name 'Cartoon', X owned_by U, U login 'joe', X available true -(Any X WHERE X is Document) UNION (Any X WHERE X is File) -Any A,B WHERE A creation_date B WITH A BEING (Any X WHERE X is Document) UNION (Any X WHERE X is File) diff --git a/tests/examplefiles/ruby_func_def.rb b/tests/examplefiles/ruby_func_def.rb deleted file mode 100644 index a820c68f..00000000 --- a/tests/examplefiles/ruby_func_def.rb +++ /dev/null @@ -1,11 +0,0 @@ -class (get_foo("blub"))::Foo - def (foo("bar") + bar("baz")).something argh, aaahaa - 42 - end -end - -class get_the_fuck("out")::Of::My - def parser_definition - ruby! - end -end diff --git a/tests/examplefiles/sample.qvto b/tests/examplefiles/sample.qvto deleted file mode 100644 index 6241ee23..00000000 --- a/tests/examplefiles/sample.qvto +++ /dev/null @@ -1,4 +0,0 @@ -transformation Foo(uml: SimpleUML, - rdbms : SimpleRDBMS) { -} -/* comment */ diff --git a/tests/examplefiles/scilab.sci b/tests/examplefiles/scilab.sci deleted file mode 100644 index 8dea7b9c..00000000 --- a/tests/examplefiles/scilab.sci +++ /dev/null @@ -1,30 +0,0 @@ -// Scilab ( http://www.scilab.org/ ) -// Copyright (C) INRIA - Serge STEER -// - -function I=sub2ind(dims,varargin) -//sub2ind is used to determine the equivalent single index -//corresponding to a given set of subscript values. - -//I = sub2ind(dims,i1,i2,..) returns the linear index equivalent to the -//row, column, ... subscripts in the arrays i1,i2,.. for an matrix of -//size dims. - -//I = sub2ind(dims,Mi) returns the linear index -//equivalent to the n subscripts in the columns of the matrix Mi for a matrix -//of size dims. - - d=[1;cumprod(matrix(dims(1:$-1),-1,1))] - for i=1:size(varargin) - if varargin(i)==[] then I=[],return,end - end - - if size(varargin)==1 then //subindices are the columns of the argument - I=(varargin(1)-1)*d+1 - else //subindices are given as separated arguments - I=1 - for i=1:size(varargin) - I=I+(varargin(i)-1)*d(i) - end - end -endfunction diff --git a/tests/examplefiles/scope.cirru b/tests/examplefiles/scope.cirru deleted file mode 100644 index c3d1a2c6..00000000 --- a/tests/examplefiles/scope.cirru +++ /dev/null @@ -1,237 +0,0 @@ - --- demo - -define a (read cd) $ if (> a cd) - print demo - print "not demo" - -say $ print a $ save $ b $ x $ c 8 - -print fun - --- test on folding - -a $ - -b $ c - -d $ e $ f - -g $ h $ i j $ k $ - --- test on comma - -print (, a) - a - , b - , c (, d) - --- test on HTML - -doctype - -html - head - title $ = Cirru - script (:defer) $ :src build/build.js - link (:rel stylesheet) $ :href css/page.css - link (:rel icon) - :href http://logo.cirru.org/cirru-32x32.png?v=3 - body - textarea.demo.source $ :placeholder "Source Code" - textarea.demo.target $ :placeholder "Compiled Data" - @insert ../html/ga.html - --- test on indentation - -a $ b $ c - -e f - (g) - h - --- test on parentheses - -3 4 (1) 4 - -((((1)))) - -x - --- test on quotes - -a b c d - -"a b c d" - -"a b \" c d" - -"a b" "c d" - --- test on unfolding - -set - add 1 $ - , x y - add 5 $ - add 2 - --- test on HTML attributes - -div - div - :class a - div - :class a b c d - - div - :class a (@ b) (@ c) d - - div - :class a - @if (@ b) - div b - div c - div - :class a - @if (@ b) b c - --- test on helpers - -@if (@call a b) (div) (span) - -@each members - div (@ name) - -@each a - div (@ b) - @each c - div (@ d) - --- test on HTML structure - -@rich more - #demo-more-box - #demo-more - :data-lang-text demo-more - 
#demo-more-list - @each room - .demo-more-room - span.demo-name - @ topic - span.demo-join - :data-lang-text demo-join - :data-id (@ id) - --- text on bool - -print #true -print #false -print #yes -print #no -print #t -print #f - --- test on Cirru js - -set a 1 -set a (= "This is a string") -set b #t - --- this is comment - -number 1.4 -string x -regex ^\s$ -regex "^\\s-\"$" -sentence this is a string - -array 1 2 3 (= nothing) #t (= #t) - -set c (array 1 (= nothing)) - -set d $ object (a (= google)) - b (= reader) - c 1 - d $ array 1 2 (= string) - -1 c --1 c - -:b d -.log console a 2 -.log console - -set demo $ object - call $ \ x (.log console x) (. this call) -. demo (.call 1) (.call 4) - -=.x d 3 - -set d null - -new Array 1 2 3 - -set x (:length c) -set str (= str) -set c (.toUpperCase str) - -\ x (+ x 1) -\ (x y) (+ x y) -\ x (set aa 1) (+ aa x) - -set f (\ x (+ x 1)) - -+ a 1 2 -+= a 1 - -> 1 2 3 - -if (> 2 1) (+ a 1) -else 2 - -if (> a 2) - .log console (= "large") -elseif (> a 1) - .log console (= "still good") -else - .log console (= "so so") - -set a $ if (> 2 1) #t #f - -switch a - 1 (.log console 1) - 2 (.log console 2) - else (.log console (= "something else")) - -set a $ array 2 +3 -4 -for (a x i) (.log console x i) - -set a 0 -while (< a 10) (+= a 1) (.log console a) - --- WebAssembly variable names - --- ":(c) 2015 Andreas Rossberg" - -module - export :even $even - export "odd" $odd - - func $even (param $n i32) (result i32) - if (i32.eq (get_local $n) (i32.const 0)) - i32.const 1 - call $odd (i32.sub (get_local $n) (i32.const 1)) - - func $odd (param $n i32) (result i32) - store_global $scratch (get_local $n) - if (i32.eq (get_local $n) (i32.const 0) - i32.const 0 - call $even (i32.sub (get_local $n) (i32.const 1)) - - global $scratch i32 - -assert_eq (invoke :even (i32.const 13)) (i32.const 0) -assert_eq (invoke :even (i32.const 20)) (i32.const 1) -assert_eq (invoke :odd (i32.const 13)) (i32.const 1) -assert_eq (invoke :odd (i32.const 20)) (i32.const 0) diff --git a/tests/examplefiles/session.dylan-console b/tests/examplefiles/session.dylan-console deleted file mode 100644 index 6f289c8e..00000000 --- a/tests/examplefiles/session.dylan-console +++ /dev/null @@ -1,9 +0,0 @@ -? 7 * 52; -=> 364 -? define variable *your-variable* = $foo; -? begin - let yours = "apple"; - let mine = yours; - mine == yours; - end; -=> #t diff --git a/tests/examplefiles/sibling.prolog b/tests/examplefiles/sibling.prolog deleted file mode 100644 index bc591502..00000000 --- a/tests/examplefiles/sibling.prolog +++ /dev/null @@ -1,19 +0,0 @@ -/* Comments /* can nest */ -still a comment -*/ - -:- module(maplist, maplist/3) - -assert(world:done). % asserts - -sibling(X, Y) :- parent_child(Z, X), parent_child(Z, Y). - -parent_child(X, Y) :- father_child(X, Y). -parent_child(X, Y) :- mother_child(X, Y). - -mother_child(trude, sally). - -father_child(tom, sally). -father_child(tom, erica). -father_child(mike, tom). - diff --git a/tests/examplefiles/simple.camkes b/tests/examplefiles/simple.camkes deleted file mode 100644 index 43e11732..00000000 --- a/tests/examplefiles/simple.camkes +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Example input for CAmkES lexer. - */ - -import ; - -// A single-line comment. 
- -import "components/Client/Client.camkes"; -import "components/Echo/Echo.camkes"; - -component Foo { - include "proc_defn.h"; - control; - dataport Buf my_port; -} - -#ifdef BAR_AVAILABLE - component Bar { - provides CharAccess ca; - } -#endif - - #define HASH_DEF_WITH_LEADING_SPACE - -assembly { /* Another multiline comment. */ - composition { - component Echo echo; - component Client client; - - connection seL4RPC simple(from client.s, to echo.s); - } - - configuration { - echo.dma_pool = 4096; - } -} - diff --git a/tests/examplefiles/simple.croc b/tests/examplefiles/simple.croc deleted file mode 100644 index 8f12771a..00000000 --- a/tests/examplefiles/simple.croc +++ /dev/null @@ -1,747 +0,0 @@ -module simple; - -// Importing stuff. -{ - function loadMod(name, ns) - { - assert(name == "mod"); - - ns.x = "I'm x"; - - ns.foo = function foo() - { - writefln("foo"); - }; - - ns.bar = function bar(x) - { - return x[0]; - }; - - ns.baz = function baz() - { - writefln(x); - }; - - foreach(k, v; ns) - if(isFunction(v)) - v.environment(ns); - } - - setModuleLoader("mod", loadMod); - - import mod : foo, bar; - foo(); - writefln(bar([5])); - mod.baz(); - - writefln(); -} - -// Super calls. -{ - class Base - { - function fork() - { - writefln("Base fork."); - } - } - - class Derived : Base - { - function fork() - { - writefln("Derived fork!"); - super.fork(); - } - } - - local d = Derived(); - d.fork(); - - writefln(); -} - -// Coroutines and coroutine iteration. -{ - local countDown = coroutine function countDown(x) - { - yield(); - - while(x > 0) - { - yield(x); - x--; - } - }; - - foreach(v; countDown, 5) - writefln(v); - - writefln(); - - local forEach = coroutine function forEach(t) - { - yield(); - - foreach(k, v; t) - yield(k, v); - }; - - foreach(_, k, v; forEach, {hi = 1, bye = 2}) - writefln("key: ", k, ", value: ", v); - - writefln(); -} - -// Testing tailcalls. -{ - function recurse(x) - { - writefln("recurse: ", x); - - if(x == 0) - return toString(x); - else - return recurse(x - 1); - } - - writefln(recurse(5)); - writefln(); - - class A - { - function f(x) - { - writefln("A.f: ", x); - - if(x == 0) - return toString(x); - else - return this.f(x - 1); // call it as this.f to force a 'method' instruction to be generated - } - } - - local a = A(); - writefln(a.f(5)); - writefln(); -} - -{ - // A function which lets us define properties for a class. - // The varargs should be a bunch of tables, each with a 'name' field, and 'getter' and/or 'setter' fields. 
- function mixinProperties(classType, vararg) - { - classType.mProps = { }; - - classType.opIndex = function opIndex(key) - { - local prop = mProps[key]; - - if(prop is null) - throw format(classType, ".opIndex() - Property '%s' does not exist", key); - - local getter = prop.getter; - - if(getter is null) - throw format(classType, ".opIndex() - Property '%s' has no getter", key); - - return getter(with this); - }; - - classType.opIndexAssign = function opIndexAssign(key, value) - { - local prop = mProps[key]; - - if(prop is null) - throw format(classType, ".opIndexAssign() - Property '%s' does not exist", key); - - local setter = prop.setter; - - if(setter is null) - throw format(classType, ".opIndexAssign() - Property '%s' has no setter", key); - - setter(with this, value); - }; - - foreach(i, prop; [vararg]) - { - if(!isTable(prop)) - throw format("mixinProperties() - property ", i, " is not a table"); - - if(prop.name is null) - throw format("mixinProperties() - property ", i, " has no name"); - - if(prop.setter is null && prop.getter is null) - throw format("mixinProperties() - property '%s' has no getter or setter", prop.name); - - classType.mProps[prop.name] = prop; - } - } - - // Create a class to test out. - class PropTest - { - mX = 0; - mY = 0; - mName = ""; - - function constructor(name) - { - mName = name; - } - - function toString() - { - return format("name = '", mName, "' x = ", mX, " y = ", mY); - } - } - - // Mix in the properties. - mixinProperties - ( - PropTest, - - { - name = "x", - - function setter(value) - { - mX = value; - } - - function getter() - { - return mX; - } - }, - - { - name = "y", - - function setter(value) - { - mY = value; - } - - function getter() - { - return mY; - } - }, - - { - name = "name", - - function getter() - { - return mName; - } - } - ); - - // Create an instance and try it out. - local p = PropTest("hello"); - - writefln(p); - p.x = 46; - p.y = 123; - p.x = p.x + p.y; - writefln(p); - - // Try to access a nonexistent property. - try - p.name = "crap"; - catch(e) - { - writefln("caught: ", e); - writefln(getTraceback()); - } - - writefln(); -} - -// Some container classes. 
-{ - class PQ - { - mData; - mLength = 0; - - function constructor() - { - mData = array.new(15); - } - - function insert(data) - { - resizeArray(); - mData[mLength] = data; - - local index = mLength; - local parentIndex = (index - 1) / 2; - - while(index > 0 && mData[parentIndex] > mData[index]) - { - local temp = mData[parentIndex]; - mData[parentIndex] = mData[index]; - mData[index] = temp; - - index = parentIndex; - parentIndex = (index - 1) / 2; - } - - mLength += 1; - } - - function remove() - { - if(mLength == 0) - throw "PQ.remove() - No items to remove"; - - local data = mData[0]; - mLength -= 1; - mData[0] = mData[mLength]; - - local index = 0; - local left = 1; - local right = 2; - - while(index < mLength) - { - local smaller; - - if(left >= mLength) - { - if(right >= mLength) - break; - else - smaller = right; - } - else - { - if(right >= mLength) - smaller = left; - else - { - if(mData[left] < mData[right]) - smaller = left; - else - smaller = right; - } - } - - if(mData[index] > mData[smaller]) - { - local temp = mData[index]; - mData[index] = mData[smaller]; - mData[smaller] = temp; - - index = smaller; - left = (index * 2) + 1; - right = left + 1; - } - else - break; - } - - return data; - } - - function resizeArray() - { - if(mLength >= #mData) - mData.length((#mData + 1) * 2 - 1); - } - - function hasData() - { - return mLength != 0; - } - } - - class Stack - { - mHead = null; - - function push(data) - { - local t = { data = data, next = mHead }; - mHead = t; - } - - function pop() - { - if(mHead is null) - throw "Stack.pop() - No items to pop"; - - local item = mHead; - mHead = mHead.next; - - return item.data; - } - - function hasData() - { - return mHead !is null; - } - } - - class Queue - { - mHead = null; - mTail = null; - - function push(data) - { - local t = { data = data, next = null }; - - if(mTail is null) - { - mHead = t; - mTail = t; - } - else - { - mTail.next = t; - mTail = t; - } - } - - function pop() - { - if(mTail is null) - throw "Queue.pop() - No items to pop"; - - local item = mHead; - mHead = mHead.next; - - if(mHead is null) - mTail = null; - - return item.data; - } - - function hasData() - { - return mHead !is null; - } - } - - writefln("Priority queue (heap)"); - - local prioQ = PQ(); - - for(i : 0 .. 10) - prioQ.insert(math.rand(0, 20)); - - while(prioQ.hasData()) - writefln(prioQ.remove()); - - writefln(); - writefln("Stack"); - - local stack = Stack(); - - for(i : 0 .. 5) - stack.push(i + 1); - - while(stack.hasData()) - writefln(stack.pop()); - - writefln(); - writefln("Queue"); - - local queue = Queue(); - - for(i : 0 .. 5) - queue.push(i + 1); - - while(queue.hasData()) - writefln(queue.pop()); - - writefln(); -} - -// opApply tests. 
-{ - class Test - { - mData = [4, 5, 6]; - - function opApply(extra) - { - if(isString(extra) && extra == "reverse") - { - local function iterator_reverse(index) - { - index--; - - if(index < 0) - return; - - return index, mData[index]; - } - - return iterator_reverse, this, #mData; - } - else - { - local function iterator(index) - { - index++; - - if(index >= #mData) - return; - - return index, mData[index]; - } - - return iterator, this, -1; - } - } - } - - local test = Test(); - - foreach(k, v; test) - writefln("test[", k, "] = ", v); - - writefln(); - - foreach(k, v; test, "reverse") - writefln("test[", k, "] = ", v); - - writefln(); - - test = - { - fork = 5, - knife = 10, - spoon = "hi" - }; - - foreach(k, v; test) - writefln("test[", k, "] = ", v); - - test = [5, 10, "hi"]; - - writefln(); - - foreach(k, v; test) - writefln("test[", k, "] = ", v); - - writefln(); - - foreach(k, v; test, "reverse") - writefln("test[", k, "] = ", v); - - writefln(); - - foreach(k, v; "hello") - writefln("str[", k, "] = ", v); - - writefln(); - - foreach(k, v; "hello", "reverse") - writefln("str[", k, "] = ", v); - - writefln(); -} - -// Testing upvalues in for loops. -{ - local arr = array.new(10); - - for(i : 0 .. 10) - arr[i] = function() { return i; }; - - writefln("This should be the values 0 through 9:"); - - foreach(func; arr) - writefln(func()); - - writefln(); -} - -// Testing nested functions. -{ - function outer() - { - local x = 3; - - function inner() - { - x++; - writefln("inner x: ", x); - } - - writefln("outer x: ", x); - inner(); - writefln("outer x: ", x); - - return inner; - } - - local func = outer(); - func(); - - writefln(); -} - -// Testing Exceptions. -{ - function thrower(x) - { - if(x >= 3) - throw "Sorry, x is too big for me!"; - } - - function tryCatch(iterations) - { - try - { - for(i : 0 .. iterations) - { - writefln("tryCatch: ", i); - thrower(i); - } - } - catch(e) - { - writefln("tryCatch caught: ", e); - throw e; - } - finally - writefln("tryCatch finally"); - } - - try - { - tryCatch(2); - tryCatch(5); - } - catch(e) - writefln("caught: ", e); - - writefln(); -} - -// Testing arrays. -{ - local array = [7, 9, 2, 3, 6]; - - array.sort(); - - foreach(i, v; array) - writefln("arr[", i, "] = ", v); - - array ~= ["foo", "far"]; - - writefln(); - - foreach(i, v; array) - writefln("arr[", i, "] = ", v); - - writefln(); -} - -// Testing vararg functions. -{ - function vargs(vararg) - { - local args = [vararg]; - - writefln("num varargs: ", #args); - - foreach(i, v; args) - writefln("args[", i, "] = ", v); - } - - vargs(); - - writefln(); - - vargs(2, 3, 5, "foo", "bar"); - - writefln(); -} - -// Testing switches. -{ - foreach(v; ["hi", "bye", "foo"]) - { - switch(v) - { - case "hi": - writefln("switched to hi"); - break; - - case "bye": - writefln("switched to bye"); - break; - - default: - writefln("switched to something else"); - break; - } - } - - writefln(); - - foreach(v; [null, false, 1, 2.3, 'x', "hi"]) - { - switch(v) - { - case null: writefln("null"); break; - case false: writefln("false"); break; - case 1: writefln("1"); break; - case 2.3: writefln("2.3"); break; - case 'x': writefln("x"); break; - case "hi": writefln("hi"); break; - } - } - - writefln(); - - class A - { - mValue; - - this(value) - { - mValue = value; - } - - function opCmp(other) - { - assert(other as A); - return mValue <=> other.mValue; - } - } - - local a1 = A(1); - local a2 = A(2); - local a3 = A(3); - - for(s : 1 .. 
4) - { - local ss = A(s); - - switch(ss) - { - case a1: - writefln(1); - break; - - case a2: - writefln(2); - break; - - case a3: - writefln(3); - break; - } - } -} \ No newline at end of file diff --git a/tests/examplefiles/smarty_example.html b/tests/examplefiles/smarty_example.html deleted file mode 100644 index cf4ffdc3..00000000 --- a/tests/examplefiles/smarty_example.html +++ /dev/null @@ -1,209 +0,0 @@ -{php} - include "some/php/file.php"; - - foreach ($rows as $row) { - echo $row; - } -{/php} - -{* smarty comment *} - - {serendipity_hookPlugin hook="entries_header" addData="$entry_id"} - - {foreach from=$entries item="dategroup"} - - {foreachelse} - {if not $plugin_clean_page} - {$CONST.NO_ENTRIES_TO_PRINT} - {/if} - {/foreach} - -{if $footer_info} - {/if} - {serendipity_hookPlugin hook="entries_footer"} - diff --git a/tests/examplefiles/source.lgt b/tests/examplefiles/source.lgt deleted file mode 100644 index ce5abced..00000000 --- a/tests/examplefiles/source.lgt +++ /dev/null @@ -1,343 +0,0 @@ - -% this is a single-line comment - -/* -this is -a block -comment -*/ - - -:- encoding(some_encoding). -:- op(Precedence, Associativity, Operator). - - -:- object(prototype, - implements(protocol), - imports(category), - extends(parent)). - - :- info([ - version is 1.0, - author is 'Paulo Moura', - date is 2008/5/1, - comment is 'Sample prototype for testing syntax coloring.']). - :- threaded. - :- synchronized. - :- dynamic. - :- initialization(some_goal(X, Y)). - :- calls(some_other_protocol). - :- uses(another_object). - - :- alias(set, member/2, set_member/2). - :- alias(words, singular//0, peculiar//0). - - :- uses(list, [append/3, member/2]). - :- uses(queues, [new/1::new_queue/1]). - - :- public(aaa/2). - :- meta_predicate(aaa(::, *)). - :- discontiguous(aaa/2). - :- mode(aaa(+callable, ?integer), zero_or_one). - :- info(position/2, [ - comment is 'Predicate brief description.', - arguments is ['Arg1'-'Arg1 description', 'Arg2'-'Arg2 description']]). - - :- protected(bbb/2). - :- synchronized(bbb/2). - :- mode(bbb(+integer, -float), one). - :- info(bbb/2, [ - comment is 'Predicate brief description.', - argnames is ['Arg1', 'Arg2']]). - - :- private(ccc/2). - :- dynamic(ccc/2). - :- mode(ccc(@atom, ?atom), one_or_more). - :- info(ccc/2, [ - comment is 'Predicate brief description.', - argnames is ['Arg1', 'Arg2']]). - - enumerating_entities(Object, Protocol, Category) :- - current_category(Category), - current_object(Object), - current_protocol(Protocol). - - enumerating_properties :- - category_property(Category, Property), - object_property(Object, Property), - protocol_property(Protocol, Property). - - creating_entities(Object, Protocol, Category) :- - create_category(Category, Relations, Directives, Clauses), - create_object(Object, Relations, Directives, Clauses), - create_protocol(Protocol, Relations, Directives). - - abolishing_entities(Object, Protocol, Category) :- - abolish_category(Category), - abolish_object(Object), - abolish_protocol(Protocol). - - entity_relations :- - extends_object(Prototype, Parent, Scope), - extends_protocol(Protocol1, Protocol2, Scope), - extends_category(Category1, Category2, Scope), - implements_protocol(Object, Protocol, Scope), - imports_category(Object, Category, Scope), - instantiates_class(Instance, Class, Scope), - specializes_class(Class, Superclass, Scope), - complements_object(Category, Object). 
- - event_handling :- - abolish_events(Event, Object, Message, Sender, Monitor), - current_event(Event, Object, Message, Sender, Monitor), - define_events(Event, Object, Message, Sender, Monitor). - - multi_threading :- - threaded(Goals), - threaded_call(Goal), - threaded_once(Goal), - threaded_ignore(Goal), - threaded_exit(Goal), - threaded_peek(Goal), - threaded_wait(Goal), - threaded_notify(Notification). - - compiling_and_loading :- - logtalk_compile(File, Options), - logtalk_load(File, Options), - logtalk_library_path(Library, Path). - - flags :- - current_logtalk_flag(Flag, Value), - set_logtalk_flag(Flag, Value). - - execution_context_methods :- - parameter(N, Parameter), - self(Self), - sender(Sender), - this(This). - - reflection_methods :- - current_predicate(Predicate), - predicate_property(Predicate, Property). - - database_methods :- - abolish(Functor/Arity), - asserta(Clause), - assertz(Clause), - clause(Head, Body), - retract(Clause), - retractall(Head). - - meta_call_methods :- - call(Goal). - - all_solutions_methods :- - bagof(Term, Goal, List), - findall(Term, Goal, List), - forall(Generate, Test), - setof(Term, Goal, List). - - event_handler_methods :- - before(Object, Message, Sender), - after(Object, Message, Sender). - - dcg_rules_parsing_methods :- - phrase(NonTerminal, Input, Rest). - - term_expansion_methods :- - expand_term(Term, Expanded), - term_expansion(Term, Expanded), - goal_expansion(Goal, Expanded). - - message_sending :- - Object::Message, - ::Message, - ^^Message. - - calling_external_code :- - {goal1, goal2, goal3}. - - context_switching_calls :- - Object< - Then - ; Else - ). - - numbers :- - X is 13, - Y is 13.13, - Z is 13.13e-23, - C1 is 0'A, C2 is 0'', C3 is 0'", - B is 0b1011101, - O is 0o1234560, - H is 0x1234567890abcDEF. - - functions :- - A is atan(3.14) + sin(0.77) - cos(123.23), - B is sign(-12) * abs(35/78), - C is truncate(3.14) + round(-7.8) - ceiling(111.88), - D is exp(3.8) - log(123.98) / sqrt(33) * 23 ** 4, - E is rem(3, 2) + mod(5, 3) * 2 rem 2 // 5 mod 3, - F is float_fractional_part(3.14) + float_integer_part(3.14), - G is float(33) + floor(99.99). - - bitwise :- - A is 16 >> 2, - B is 16 << 2, - C is 10 /\ 12, - D is 10 \/ 12, - E is \ 10. - - term_unification :- - Term1 = Term2, - Term1 \= Term2, - unify_with_occurs_check(Term1, Term2). - - term_testing :- - atom(Atom), - atomic(Atomic), - integer(Integer), - float(Float), - compound(Term), - nonvar(Term), - var(Term), - number(Number). - - term_comparison :- - Term1 == Term2, - Term1 \== Term2, - Term1 @< Term2, - Term1 @=< Term2, - Term1 @>= Term2, - Term1 @> Term2. - - term_creation_and_decomposition :- - functor(Term, Functor, Arity), - arg(N, Term, Arg), - Term =.. [Functor| Args], - copy_term(Term, Copy). - - arithemtic_evaluation :- - X is Expression. - - arithemtic_comparison :- - Exp1 =:= Exp2, - Exp1 =\= Exp2, - Exp1 < Exp2, - Exp1 =< Exp2, - Exp1 > Exp2, - Exp1 >= Exp2. - - stream_selection_and_control :- - current_input(Stream), - current_output(Stream), - set_input(Stream), - set_output(Stream), - open(Source, Mode, Stream, Options), - close(Stream), - flush_output(Stream), - stream_property(Stream, Property), - at_end_of_stream(Stream), - set_stream_position(Stream, Position), - flush_output, - at_end_of_stream. - - character_input_output :- - get_char(Char), - get_code(Code), - peek_char(Char), - peek_code(Code), - put_char(Char), - put_code(Code), - nl(Stream), - nl. - - byte_input_output :- - get_byte(Byte), - peek_byte(Byte), - put_byte(Byte). 
- - term_input_output :- - read(Term), - read_term(Term), - write(Term), - write(Term), - write_canonical(Term), - write_term(Stream, Term, Options), - current_op(Precedence, Associativity, Operator), - op(Precedence, Associativity, Operator), - current_char_conversion(InChar, OutChar), - char_conversion(InChar, OutChar). - - logic_and_control :- - \+ Goal, - once(Goal), - repeat, - !. - - atomic_term_processing :- - atom_length(Atom, Length), - atom_chars(Atom, Chars), - atom_codes(Atom, Codes), - atom_concat(Atom1, Atom2, Atom), - sub_atom(Atom, Before, Length, After, SubAtom), - char_code(Char, Code), - number_chars(Number, Chars), - number_codes(Number, Codes). - - implementation_defined_hooks :- - current_prolog_flag(Flag, Value), - set_prolog_flag(Flag, Value), - halt(ExitCode), - halt. - - number(C) --> "+", number(C). - number(C) --> "-", number(X), {C is -X}. - number(X) --> [C], {0'0 =< C, C =< 0'9, X is C - 0'0}. - -:- end_object. - - - -:- object(class, - implements(protocol), - imports(category), - instantiates(metaclass), - specializes(superclass)). - - -:- end_object. - - - -:- object(parametric(_Par1, _Par2), - implements(protocol), - imports(category), - extends(parent(_Par))). - - -:- end_object. - - - -:- category(category, - implements(protocol), - extends(other_category)). - - -:- end_category. - - - -:- protocol(extended, - extends(minimal)). - - -:- end_protocol. diff --git a/tests/examplefiles/sources.list b/tests/examplefiles/sources.list deleted file mode 100644 index 3f363352..00000000 --- a/tests/examplefiles/sources.list +++ /dev/null @@ -1,62 +0,0 @@ -## CD ROM -deb cdrom:[Xubuntu 6.06.1 _Dapper Drake_ - Release i386 (20060807)]/ dapper main restricted - -deb http://archive.ubuntu.com/ubuntu/ dapper main restricted -deb-src http://archive.ubuntu.com/ubuntu/ dapper main restricted - -deb http://foo.com/$(ARCH)/ main foo - -## Major bug fix updates produced after the final release of the -## distribution. -deb http://archive.ubuntu.com/ubuntu/ dapper-updates main restricted -deb-src http://archive.ubuntu.com/ubuntu/ dapper-updates main restricted - -## Uncomment the following two lines to add software from the 'universe' -## repository. -## N.B. software from this repository is ENTIRELY UNSUPPORTED by the Ubuntu -## team, and may not be under a free licence. Please satisfy yourself as to -## your rights to use the software. Also, please note that software in -## universe WILL NOT receive any review or updates from the Ubuntu security -## team. -deb http://archive.ubuntu.com/ubuntu/ dapper universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ dapper universe multiverse - -## Uncomment the following two lines to add software from the 'backports' -## repository. -## N.B. software from this repository may not have been tested as -## extensively as that contained in the main release, although it includes -## newer versions of some applications which may provide useful features. -## Also, please note that software in backports WILL NOT receive any review -## or updates from the Ubuntu security team. 
-deb http://archive.ubuntu.com/ubuntu/ dapper-backports main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ dapper-backports main restricted universe multiverse - -deb http://security.ubuntu.com/ubuntu dapper-security main restricted -deb-src http://security.ubuntu.com/ubuntu dapper-security main restricted -deb http://security.ubuntu.com/ubuntu dapper-security universe multiverse -deb-src http://security.ubuntu.com/ubuntu dapper-security universe multiverse - -## dapper-commercial by canonical -## currently has realplay (realplayer 10) and opera (opera 9) -deb http://archive.canonical.com/ubuntu dapper-commercial main - -## Bleeding edge wine repository for Dapper -## only uncomment it if you need it -## deb http://wine.budgetdedicated.com/apt dapper main -## deb-src http://wine.budgetdedicated.com/apt dapper main - -## skype -## only uncomment it if you need it -## deb http://download.skype.com/linux/repos/debian/ stable non-free - -deb http://de.archive.ubuntu.com/ubuntu/ edgy main restricted multiverse universe - -deb http://de.archive.ubuntu.com/ubuntu/ edgy-updates main restricted multiverse universe - -deb http://de.archive.ubuntu.com/ubuntu/ edgy-backports main restricted universe multiverse - -deb http://security.ubuntu.com/ubuntu edgy-security main restricted universe multiverse - -deb http://wine.budgetdedicated.com/apt edgy main - -deb http://archive.czessi.net/ubuntu edgy main restricted universe multiverse i18n-de diff --git a/tests/examplefiles/sparql.rq b/tests/examplefiles/sparql.rq deleted file mode 100644 index d979d203..00000000 --- a/tests/examplefiles/sparql.rq +++ /dev/null @@ -1,48 +0,0 @@ -# This is a test SPARQL query - -BASE - -PREFIX foaf: -PREFIX ex: -PREFIX xsd: -PREFIX dcterms: - -SELECT ?person (COUNT(?nick) AS ?nickCount) { - <#jonny> foaf:knows ?person . - ?person a foaf:Person . - ?person foaf:firstName "Freddy" . - ?person foaf:lastName "Smith" . - # predicate-object list - ?person foaf:nick ?nick ; - foaf:age "21"^^xsd:int ; # typed literal - ex:title 'Mr' ; # single-quoted string - ex:width 2 ; # integer - ex:height 1.80 ; # float - ex:distanceToSun 1.4e8 ; # float with exponent - ex:ownsACat true ; - ex:catName "Kitty", "Kitty_" ; # object list - # some other float values - ex:float1 .125 ; - ex:float2 +2.5e10 ; - ex:float3 2.5e+10 ; - ex:float4 -1.e-10 ; - ex:float5 .0e1 ; - ex:float6 5e11 ; - ex:float7 1. ; - ex:aUnicodeÀExample "somestring" ; - ex:catName "Kitty", "Kitty_" ; # object list - ex:escape "\n\u00c0\U00010000"; - ex:catAge ?catage ; - dcterms:description "Someone with a cat called \"cat\"."@en . # language tag - ?person foaf:knows _:b0 . - _:b0 foaf:knows [ _:b1 a foaf:Person; foaf:name "Jonny" . ] . 
- OPTIONAL { ?person foaf:isPrimaryTopicOf ?page } - OPTIONAL { ?person foaf:name ?name - { ?person foaf:depiction ?img } - UNION - { ?person foaf:firstName ?firstN } } - FILTER ( bound(?page) || bound(?img) || bound(?firstN) ) - FILTER ( ?catage < 101 && ?catage > 9 && ?catage >= 10 && ?catage <= 100 && ?catage != 20 ) -} -GROUP BY ?person -ORDER BY ?img ASC(?firstN) DESC(?page) diff --git a/tests/examplefiles/sphere.pov b/tests/examplefiles/sphere.pov deleted file mode 100644 index 847ed451..00000000 --- a/tests/examplefiles/sphere.pov +++ /dev/null @@ -1,18 +0,0 @@ -#include "colors.inc" - -background { color Cyan } - -camera { - location <0, 2, -3> - look_at <0, 1, 2> -} - -sphere { - <0, 1, 2>, 2 - texture { - pigment { color Yellow } - } -} - -light_source { <2, 4, -3> color White} - diff --git a/tests/examplefiles/sqlite3.sqlite3-console b/tests/examplefiles/sqlite3.sqlite3-console deleted file mode 100644 index 3ec27135..00000000 --- a/tests/examplefiles/sqlite3.sqlite3-console +++ /dev/null @@ -1,27 +0,0 @@ -SQLite version 3.4.2 -Enter ".help" for instructions -sqlite> .schema -CREATE TABLE paste (paste_id integer, code text, parsed_code text, pub_date -varchar(24), language varchar(64), parent_id integer, url varchar(128)); -CREATE TABLE vars (key varchar(24), value varchar(128)); -sqlite> a ' - ...> ' - ...> ; -SQL error: near "a": syntax error -sqlite> %; -SQL error: near "%": syntax error -sqlite> select count(language), language from paste group by language order - ...> by count(language) desc; -144|python -76|text -22|pycon -9|ruby -7|c -7|js -6|html+django -4|html -4|tex -2|html+php -1|cpp -1|scheme -sqlite> diff --git a/tests/examplefiles/squid.conf b/tests/examplefiles/squid.conf deleted file mode 100644 index 833d4fca..00000000 --- a/tests/examplefiles/squid.conf +++ /dev/null @@ -1,30 +0,0 @@ -# Some multiline comments - -acl manager proto cache_object -acl localhost src 127.0.0.1/32 ::1 -acl to_localhost dst 127.0.0.0/8 0.0.0.0/32 ::1 -acl SSL_ports port 443 -acl Safe_ports port 80 # http -acl Safe_ports port 21 # ftp -acl Safe_ports port 443 # https -acl Safe_ports port 70 # gopher -acl Safe_ports port 210 # wais -acl Safe_ports port 1025-65535 # unregistered ports -acl Safe_ports port 280 # http-mgmt -acl Safe_ports port 488 # gss-http -acl Safe_ports port 591 # filemaker -acl Safe_ports port 777 # multiling http -acl CONNECT method CONNECT -http_access allow manager localhost -http_access deny manager -http_access deny !Safe_ports -http_access deny CONNECT !SSL_ports -http_access allow localhost -http_access deny all -http_port 3128 -hierarchy_stoplist cgi-bin ? -coredump_dir /var/spool/squid3 -refresh_pattern ^ftp: 1440 20% 10080 -refresh_pattern ^gopher: 1440 0% 1440 -refresh_pattern -i (/cgi-bin/|\?) 0 0% 0 -refresh_pattern . 
0 20% 4320 diff --git a/tests/examplefiles/string.jl b/tests/examplefiles/string.jl deleted file mode 100644 index 67bf6c70..00000000 --- a/tests/examplefiles/string.jl +++ /dev/null @@ -1,1031 +0,0 @@ -## core string functions ## - -length(s::String) = error("you must implement length(",typeof(s),")") -next(s::String, i::Int) = error("you must implement next(",typeof(s),",Int)") -next(s::DirectIndexString, i::Int) = (s[i],i+1) -next(s::String, i::Integer) = next(s,int(i)) - -## generic supplied functions ## - -start(s::String) = 1 -done(s::String,i) = (i > length(s)) -isempty(s::String) = done(s,start(s)) -ref(s::String, i::Int) = next(s,i)[1] -ref(s::String, i::Integer) = s[int(i)] -ref(s::String, x::Real) = s[iround(x)] -ref{T<:Integer}(s::String, r::Range1{T}) = s[int(first(r)):int(last(r))] - -symbol(s::String) = symbol(cstring(s)) -string(s::String) = s - -print(s::String) = for c=s; print(c); end -print(x...) = for i=x; print(i); end -println(args...) = print(args..., '\n') - -show(s::String) = print_quoted(s) - -(*)(s::String...) = strcat(s...) -(^)(s::String, r::Integer) = repeat(s,r) - -size(s::String) = (length(s),) -size(s::String, d::Integer) = d==1 ? length(s) : - error("in size: dimension ",d," out of range") - -strlen(s::DirectIndexString) = length(s) -function strlen(s::String) - i = start(s) - if done(s,i) - return 0 - end - n = 1 - while true - c, j = next(s,i) - if done(s,j) - return n - end - n += 1 - i = j - end -end - -isvalid(s::DirectIndexString, i::Integer) = (start(s) <= i <= length(s)) -function isvalid(s::String, i::Integer) - try - next(s,i) - true - catch - false - end -end - -prevind(s::DirectIndexString, i::Integer) = i-1 -thisind(s::DirectIndexString, i::Integer) = i -nextind(s::DirectIndexString, i::Integer) = i+1 - -prevind(s::String, i::Integer) = thisind(s,thisind(s,i)-1) - -function thisind(s::String, i::Integer) - for j = i:-1:1 - if isvalid(s,j) - return j - end - end - return 0 # out of range -end - -function nextind(s::String, i::Integer) - for j = i+1:length(s) - if isvalid(s,j) - return j - end - end - length(s)+1 # out of range -end - -ind2chr(s::DirectIndexString, i::Integer) = i -chr2ind(s::DirectIndexString, i::Integer) = i - -function ind2chr(s::String, i::Integer) - s[i] # throws error if invalid - j = 1 - k = start(s) - while true - c, l = next(s,k) - if i <= k - return j - end - j += 1 - k = l - end -end - -function chr2ind(s::String, i::Integer) - if i < 1 - return i - end - j = 1 - k = start(s) - while true - c, l = next(s,k) - if i == j - return k - end - j += 1 - k = l - end -end - -function strchr(s::String, c::Char, i::Integer) - i = nextind(s,i) - while !done(s,i) - d, j = next(s,i) - if c == d - return i - end - i = j - end - return 0 -end -strchr(s::String, c::Char) = strchr(s, c, start(s)) -contains(s::String, c::Char) = (strchr(s,c)!=0) - -function chars(s::String) - cx = Array(Char,strlen(s)) - i = 0 - for c in s - cx[i += 1] = c - end - return cx -end - -function cmp(a::String, b::String) - i = start(a) - j = start(b) - while !done(a,i) && !done(b,i) - c, i = next(a,i) - d, j = next(b,j) - if c != d - return c < d ? -1 : +1 - end - end - done(a,i) && !done(b,j) ? -1 : - !done(a,i) && done(b,j) ? 
+1 : 0 -end - -isequal(a::String, b::String) = cmp(a,b) == 0 -isless(a::String, b::String) = cmp(a,b) < 0 - -# faster comparisons for byte strings - -cmp(a::ByteString, b::ByteString) = lexcmp(a.data, b.data) -isequal(a::ByteString, b::ByteString) = length(a)==length(b) && cmp(a,b)==0 - -## character column width function ## - -charwidth(c::Char) = max(0,int(ccall(:wcwidth, Int32, (Char,), c))) -strwidth(s::String) = (w=0; for c in s; w += charwidth(c); end; w) -strwidth(s::ByteString) = ccall(:u8_strwidth, Int, (Ptr{Uint8},), s.data) -# TODO: implement and use u8_strnwidth that takes a length argument - -## generic string uses only length and next ## - -type GenericString <: String - string::String -end - -length(s::GenericString) = length(s.string) -next(s::GenericString, i::Int) = next(s.string, i) - -## plain old character arrays ## - -type CharString <: String - chars::Array{Char,1} - - CharString(a::Array{Char,1}) = new(a) - CharString(c::Char...) = new([ c[i] | i=1:length(c) ]) -end -CharString(x...) = CharString(map(char,x)...) - -next(s::CharString, i::Int) = (s.chars[i], i+1) -length(s::CharString) = length(s.chars) -strlen(s::CharString) = length(s) - -string(c::Char) = CharString(c) -string(c::Char, x::Char...) = CharString(c, x...) - -## substrings reference original strings ## - -type SubString <: String - string::String - offset::Int - length::Int - - SubString(s::String, i::Int, j::Int) = new(s, i-1, j-i+1) - SubString(s::SubString, i::Int, j::Int) = - new(s.string, i-1+s.offset, j-i+1) -end -SubString(s::String, i::Integer, j::Integer) = SubString(s, int(i), int(j)) - -function next(s::SubString, i::Int) - if i < 1 || i > s.length - error("string index out of bounds") - end - c, i = next(s.string, i+s.offset) - c, i-s.offset -end - -length(s::SubString) = s.length -# TODO: strlen(s::SubString) = ?? -# default implementation will work but it's slow -# can this be delegated efficiently somehow? -# that may require additional string interfaces - -function ref(s::String, r::Range1{Int}) - if first(r) < 1 || length(s) < last(r) - error("in substring slice: index out of range") - end - SubString(s, first(r), last(r)) -end - -## efficient representation of repeated strings ## - -type RepString <: String - string::String - repeat::Integer -end - -length(s::RepString) = length(s.string)*s.repeat -strlen(s::RepString) = strlen(s.string)*s.repeat - -function next(s::RepString, i::Int) - if i < 1 || i > length(s) - error("string index out of bounds") - end - j = mod1(i,length(s.string)) - c, k = next(s.string, j) - c, k-j+i -end - -function repeat(s::String, r::Integer) - r < 0 ? error("can't repeat a string ",r," times") : - r == 0 ? "" : - r == 1 ? s : - RepString(s,r) -end - -## reversed strings without data movement ## - -type RevString <: String - string::String -end - -length(s::RevString) = length(s.string) -strlen(s::RevString) = strlen(s.string) - -start(s::RevString) = (n=length(s); n-thisind(s.string,n)+1) -function next(s::RevString, i::Int) - n = length(s); j = n-i+1 - (s.string[j], n-thisind(s.string,j-1)+1) -end - -reverse(s::String) = RevString(s) -reverse(s::RevString) = s.string - -## ropes for efficient concatenation, etc. ## - -# Idea: instead of this standard binary tree structure, -# how about we keep an array of substrings, with an -# offset array. We can do binary search on the offset -# array so we get O(log(n)) indexing time still, but we -# can compute the offsets lazily and avoid all the -# futzing around while the string is being constructed. 
- -type RopeString <: String - head::String - tail::String - depth::Int32 - length::Int - - RopeString(h::RopeString, t::RopeString) = - depth(h.tail) + depth(t) < depth(h.head) ? - RopeString(h.head, RopeString(h.tail, t)) : - new(h, t, max(h.depth,t.depth)+1, length(h)+length(t)) - - RopeString(h::RopeString, t::String) = - depth(h.tail) < depth(h.head) ? - RopeString(h.head, RopeString(h.tail, t)) : - new(h, t, h.depth+1, length(h)+length(t)) - - RopeString(h::String, t::RopeString) = - depth(t.head) < depth(t.tail) ? - RopeString(RopeString(h, t.head), t.tail) : - new(h, t, t.depth+1, length(h)+length(t)) - - RopeString(h::String, t::String) = - new(h, t, 1, length(h)+length(t)) -end - -depth(s::String) = 0 -depth(s::RopeString) = s.depth - -function next(s::RopeString, i::Int) - if i <= length(s.head) - return next(s.head, i) - else - c, j = next(s.tail, i-length(s.head)) - return c, j+length(s.head) - end -end - -length(s::RopeString) = s.length -strlen(s::RopeString) = strlen(s.head) + strlen(s.tail) - -strcat() = "" -strcat(s::String) = s -strcat(x...) = strcat(map(string,x)...) -strcat(s::String, t::String...) = - (t = strcat(t...); isempty(s) ? t : isempty(t) ? s : RopeString(s, t)) - -print(s::RopeString) = print(s.head, s.tail) - -## transformed strings ## - -type TransformedString <: String - transform::Function - string::String -end - -length(s::TransformedString) = length(s.string) -strlen(s::TransformedString) = strlen(s.string) - -function next(s::TransformedString, i::Int) - c, j = next(s.string,i) - c = s.transform(c, i) - return c, j -end - -## uppercase and lowercase transformations ## - -uppercase(c::Char) = ccall(:towupper, Char, (Char,), c) -lowercase(c::Char) = ccall(:towlower, Char, (Char,), c) - -uppercase(s::String) = TransformedString((c,i)->uppercase(c), s) -lowercase(s::String) = TransformedString((c,i)->lowercase(c), s) - -ucfirst(s::String) = TransformedString((c,i)->i==1 ? uppercase(c) : c, s) -lcfirst(s::String) = TransformedString((c,i)->i==1 ? lowercase(c) : c, s) - -const uc = uppercase -const lc = lowercase - -## string map ## - -function map(f::Function, s::String) - out = memio(length(s)) - for c in s - write(out, f(c)::Char) - end - takebuf_string(out) -end - -## conversion of general objects to strings ## - -string(x) = print_to_string(show, x) -cstring(x...) = print_to_string(print, x...) - -function cstring(p::Ptr{Uint8}) - p == C_NULL ? error("cannot convert NULL to string") : - ccall(:jl_cstr_to_string, Any, (Ptr{Uint8},), p)::ByteString -end - -## string promotion rules ## - -promote_rule(::Type{UTF8String} , ::Type{ASCIIString}) = UTF8String -promote_rule(::Type{UTF8String} , ::Type{CharString} ) = UTF8String -promote_rule(::Type{ASCIIString}, ::Type{CharString} ) = UTF8String - -## printing literal quoted string data ## - -# TODO: this is really the inverse of print_unbackslashed - -function print_quoted_literal(s::String) - print('"') - for c = s; c == '"' ? print("\\\"") : print(c); end - print('"') -end - -## string escaping & unescaping ## - -escape_nul(s::String, i::Int) = - !done(s,i) && '0' <= next(s,i)[1] <= '7' ? L"\x00" : L"\0" - -is_hex_digit(c::Char) = '0'<=c<='9' || 'a'<=c<='f' || 'A'<=c<='F' -need_full_hex(s::String, i::Int) = !done(s,i) && is_hex_digit(next(s,i)[1]) - -function print_escaped(s::String, esc::String) - i = start(s) - while !done(s,i) - c, j = next(s,i) - c == '\0' ? print(escape_nul(s,j)) : - c == '\e' ? print(L"\e") : - c == '\\' ? print("\\\\") : - contains(esc,c) ? print('\\', c) : - iswprint(c) ? 
print(c) : - 7 <= c <= 13 ? print('\\', "abtnvfr"[c-6]) : - c <= '\x7f' ? print(L"\x", hex(c, 2)) : - c <= '\uffff' ? print(L"\u", hex(c, need_full_hex(s,j) ? 4 : 2)) : - print(L"\U", hex(c, need_full_hex(s,j) ? 8 : 4)) - i = j - end -end - -escape_string(s::String) = print_to_string(length(s), print_escaped, s, "\"") -print_quoted(s::String) = (print('"'); print_escaped(s, "\"\$"); print('"')) -#" # work around syntax highlighting problem -quote_string(s::String) = print_to_string(length(s)+2, print_quoted, s) - -# bare minimum unescaping function unescapes only given characters - -function print_unescaped_chars(s::String, esc::String) - if !contains(esc,'\\') - esc = strcat("\\", esc) - end - i = start(s) - while !done(s,i) - c, i = next(s,i) - if c == '\\' && !done(s,i) && contains(esc,s[i]) - c, i = next(s,i) - end - print(c) - end -end - -unescape_chars(s::String, esc::String) = - print_to_string(length(s), print_unescaped_chars, s, esc) - -# general unescaping of traditional C and Unicode escape sequences - -function print_unescaped(s::String) - i = start(s) - while !done(s,i) - c, i = next(s,i) - if !done(s,i) && c == '\\' - c, i = next(s,i) - if c == 'x' || c == 'u' || c == 'U' - n = k = 0 - m = c == 'x' ? 2 : - c == 'u' ? 4 : 8 - while (k+=1) <= m && !done(s,i) - c, j = next(s,i) - n = '0' <= c <= '9' ? n<<4 + c-'0' : - 'a' <= c <= 'f' ? n<<4 + c-'a'+10 : - 'A' <= c <= 'F' ? n<<4 + c-'A'+10 : break - i = j - end - if k == 1 - error("\\x used with no following hex digits") - end - if m == 2 # \x escape sequence - write(uint8(n)) - else - print(char(n)) - end - elseif '0' <= c <= '7' - k = 1 - n = c-'0' - while (k+=1) <= 3 && !done(s,i) - c, j = next(s,i) - n = '0' <= c <= '7' ? n<<3 + c-'0' : break - i = j - end - if n > 255 - error("octal escape sequence out of range") - end - write(uint8(n)) - else - print(c == 'a' ? '\a' : - c == 'b' ? '\b' : - c == 't' ? '\t' : - c == 'n' ? '\n' : - c == 'v' ? '\v' : - c == 'f' ? '\f' : - c == 'r' ? '\r' : - c == 'e' ? '\e' : c) - end - else - print(c) - end - end -end - -unescape_string(s::String) = print_to_string(length(s), print_unescaped, s) - -## checking UTF-8 & ACSII validity ## - -byte_string_classify(s::ByteString) = - ccall(:u8_isvalid, Int32, (Ptr{Uint8}, Int), s.data, length(s)) - # 0: neither valid ASCII nor UTF-8 - # 1: valid ASCII - # 2: valid UTF-8 - -is_valid_ascii(s::ByteString) = byte_string_classify(s) == 1 -is_valid_utf8 (s::ByteString) = byte_string_classify(s) != 0 - -check_ascii(s::ByteString) = is_valid_ascii(s) ? s : error("invalid ASCII sequence") -check_utf8 (s::ByteString) = is_valid_utf8(s) ? s : error("invalid UTF-8 sequence") - -## string interpolation parsing ## - -function _jl_interp_parse(s::String, unescape::Function, printer::Function) - sx = {} - i = j = start(s) - while !done(s,j) - c, k = next(s,j) - if c == '$' - if !isempty(s[i:j-1]) - push(sx, unescape(s[i:j-1])) - end - ex, j = parseatom(s,k) - push(sx, ex) - i = j - elseif c == '\\' && !done(s,k) - if s[k] == '$' - if !isempty(s[i:j-1]) - push(sx, unescape(s[i:j-1])) - end - i = k - end - c, j = next(s,k) - else - j = k - end - end - if !isempty(s[i:]) - push(sx, unescape(s[i:j-1])) - end - length(sx) == 1 && isa(sx[1],ByteString) ? sx[1] : - expr(:call, :print_to_string, printer, sx...) -end - -_jl_interp_parse(s::String, u::Function) = _jl_interp_parse(s, u, print) -_jl_interp_parse(s::String) = _jl_interp_parse(s, x->check_utf8(unescape_string(x))) - -function _jl_interp_parse_bytes(s::String) - writer(x...) 
= for w=x; write(w); end - _jl_interp_parse(s, unescape_string, writer) -end - -## core string macros ## - -macro str(s); _jl_interp_parse(s); end -macro S_str(s); _jl_interp_parse(s); end -macro I_str(s); _jl_interp_parse(s, x->unescape_chars(x,"\"")); end -macro E_str(s); check_utf8(unescape_string(s)); end -macro B_str(s); _jl_interp_parse_bytes(s); end -macro b_str(s); ex = _jl_interp_parse_bytes(s); :(($ex).data); end - -## shell-like command parsing ## - -function _jl_shell_parse(s::String, interp::Bool) - - in_single_quotes = false - in_double_quotes = false - - args = {} - arg = {} - i = start(s) - j = i - - function update_arg(x) - if !isa(x,String) || !isempty(x) - push(arg, x) - end - end - function append_arg() - if isempty(arg); arg = {"",}; end - push(args, arg) - arg = {} - end - - while !done(s,j) - c, k = next(s,j) - if !in_single_quotes && !in_double_quotes && iswspace(c) - update_arg(s[i:j-1]) - append_arg() - j = k - while !done(s,j) - c, k = next(s,j) - if !iswspace(c) - i = j - break - end - j = k - end - elseif interp && !in_single_quotes && c == '$' - update_arg(s[i:j-1]); i = k; j = k - if done(s,k) - error("\$ right before end of command") - end - if iswspace(s[k]) - error("space not allowed right after \$") - end - ex, j = parseatom(s,j) - update_arg(ex); i = j - else - if !in_double_quotes && c == '\'' - in_single_quotes = !in_single_quotes - update_arg(s[i:j-1]); i = k - elseif !in_single_quotes && c == '"' - in_double_quotes = !in_double_quotes - update_arg(s[i:j-1]); i = k - elseif c == '\\' - if in_double_quotes - if done(s,k) - error("unterminated double quote") - end - if s[k] == '"' || s[k] == '$' - update_arg(s[i:j-1]); i = k - c, k = next(s,k) - end - elseif !in_single_quotes - if done(s,k) - error("dangling backslash") - end - update_arg(s[i:j-1]); i = k - c, k = next(s,k) - end - end - j = k - end - end - - if in_single_quotes; error("unterminated single quote"); end - if in_double_quotes; error("unterminated double quote"); end - - update_arg(s[i:]) - append_arg() - - if !interp - return args - end - - # construct an expression - exprs = {} - for arg in args - push(exprs, expr(:tuple, arg)) - end - expr(:tuple,exprs) -end -_jl_shell_parse(s::String) = _jl_shell_parse(s,true) - -function shell_split(s::String) - parsed = _jl_shell_parse(s,false) - args = String[] - for arg in parsed - push(args, strcat(arg...)) - end - args -end - -function print_shell_word(word::String) - if isempty(word) - print("''") - end - has_single = false - has_special = false - for c in word - if iswspace(c) || c=='\\' || c=='\'' || c=='"' || c=='$' - has_special = true - if c == '\'' - has_single = true - end - end - end - if !has_special - print(word) - elseif !has_single - print('\'', word, '\'') - else - print('"') - for c in word - if c == '"' || c == '$' - print('\\') - end - print(c) - end - print('"') - end -end - -function print_shell_escaped(cmd::String, args::String...) - print_shell_word(cmd) - for arg in args - print(' ') - print_shell_word(arg) - end -end - -shell_escape(cmd::String, args::String...) = - print_to_string(print_shell_escaped, cmd, args...) - -## interface to parser ## - -function parse(s::String, pos, greedy) - # returns (expr, end_pos). expr is () in case of parse error. - ex, pos = ccall(:jl_parse_string, Any, - (Ptr{Uint8}, Int32, Int32), - cstring(s), pos-1, greedy ? 
1:0) - if isa(ex,Expr) && is(ex.head,:error) - throw(ParseError(ex.args[1])) - end - if ex == (); throw(ParseError("end of input")); end - ex, pos+1 # C is zero-based, Julia is 1-based -end - -parse(s::String) = parse(s, 1, true) -parse(s::String, pos) = parse(s, pos, true) -parseatom(s::String) = parse(s, 1, false) -parseatom(s::String, pos) = parse(s, pos, false) - -## miscellaneous string functions ## - -function lpad(s::String, n::Integer, p::String) - m = n - strlen(s) - if m <= 0; return s; end - l = strlen(p) - if l==1 - return p^m * s - end - q = div(m,l) - r = m - q*l - cstring(p^q*p[1:chr2ind(p,r)]*s) -end - -function rpad(s::String, n::Integer, p::String) - m = n - strlen(s) - if m <= 0; return s; end - l = strlen(p) - if l==1 - return s * p^m - end - q = div(m,l) - r = m - q*l - cstring(s*p^q*p[1:chr2ind(p,r)]) -end - -lpad(s, n::Integer, p) = lpad(string(s), n, string(p)) -rpad(s, n::Integer, p) = rpad(string(s), n, string(p)) - -lpad(s, n::Integer) = lpad(string(s), n, " ") -rpad(s, n::Integer) = rpad(string(s), n, " ") - -function split(s::String, delims, include_empty::Bool) - i = 1 - strs = String[] - len = length(s) - while true - tokstart = tokend = i - while !done(s,i) - (c,i) = next(s,i) - if contains(delims, c) - break - end - tokend = i - end - tok = s[tokstart:(tokend-1)] - if include_empty || !isempty(tok) - push(strs, tok) - end - if !((i <= len) || (i==len+1 && tokend!=i)) - break - end - end - strs -end - -split(s::String) = split(s, (' ','\t','\n','\v','\f','\r'), false) -split(s::String, x) = split(s, x, true) -split(s::String, x::Char, incl::Bool) = split(s, (x,), incl) - -function print_joined(strings, delim, last) - i = start(strings) - if done(strings,i) - return - end - str, i = next(strings,i) - print(str) - while !done(strings,i) - str, i = next(strings,i) - print(done(strings,i) ? last : delim) - print(str) - end -end - -function print_joined(strings, delim) - i = start(strings) - while !done(strings,i) - str, i = next(strings,i) - print(str) - if !done(strings,i) - print(delim) - end - end -end -print_joined(strings) = print_joined(strings, "") - -join(args...) = print_to_string(print_joined, args...) - -chop(s::String) = s[1:thisind(s,length(s))-1] -chomp(s::String) = (i=thisind(s,length(s)); s[i]=='\n' ? s[1:i-1] : s) -chomp(s::ByteString) = s.data[end]==0x0a ? s[1:end-1] : s - -function lstrip(s::String) - i = start(s) - while !done(s,i) - c, j = next(s,i) - if !iswspace(c) - return s[i:end] - end - i = j - end - "" -end - -function rstrip(s::String) - r = reverse(s) - i = start(r) - while !done(r,i) - c, j = next(r,i) - if !iswspace(c) - return s[1:end-i+1] - end - i = j - end - "" -end - -strip(s::String) = lstrip(rstrip(s)) - -## string to integer functions ## - -function parse_int{T<:Integer}(::Type{T}, s::String, base::Integer) - if !(2 <= base <= 36); error("invalid base: ",base); end - i = start(s) - if done(s,i) - error("premature end of integer (in ",show_to_string(s),")") - end - c,i = next(s,i) - sgn = one(T) - if T <: Signed && c == '-' - sgn = -sgn - if done(s,i) - error("premature end of integer (in ",show_to_string(s),")") - end - c,i = next(s,i) - end - base = convert(T,base) - n::T = 0 - while true - d = '0' <= c <= '9' ? c-'0' : - 'A' <= c <= 'Z' ? c-'A'+10 : - 'a' <= c <= 'z' ? c-'a'+10 : typemax(Int) - if d >= base - error(show_to_string(c)," is not a valid digit (in ",show_to_string(s),")") - end - # TODO: overflow detection? 
- n = n*base + d - if done(s,i) - break - end - c,i = next(s,i) - end - return flipsign(n,sgn) -end - -parse_int(s::String, base::Integer) = parse_int(Int,s,base) -parse_int(T::Type, s::String) = parse_int(T,s,10) -parse_int(s::String) = parse_int(Int,s,10) - -parse_bin(T::Type, s::String) = parse_int(T,s,2) -parse_oct(T::Type, s::String) = parse_int(T,s,8) -parse_hex(T::Type, s::String) = parse_int(T,s,16) - -parse_bin(s::String) = parse_int(Int,s,2) -parse_oct(s::String) = parse_int(Int,s,8) -parse_hex(s::String) = parse_int(Int,s,16) - -integer (s::String) = int(s) -unsigned(s::String) = uint(s) -int (s::String) = parse_int(Int,s) -uint (s::String) = parse_int(Uint,s) -int8 (s::String) = parse_int(Int8,s) -uint8 (s::String) = parse_int(Uint8,s) -int16 (s::String) = parse_int(Int16,s) -uint16 (s::String) = parse_int(Uint16,s) -int32 (s::String) = parse_int(Int32,s) -uint32 (s::String) = parse_int(Uint32,s) -int64 (s::String) = parse_int(Int64,s) -uint64 (s::String) = parse_int(Uint64,s) - -## integer to string functions ## - -const _jl_dig_syms = "0123456789abcdefghijklmnopqrstuvwxyz".data - -function int2str(n::Union(Int64,Uint64), b::Integer, l::Int) - if b < 2 || b > 36; error("int2str: invalid base ", b); end - neg = n < 0 - n = unsigned(abs(n)) - b = convert(typeof(n), b) - ndig = ndigits(n, b) - sz = max(convert(Int, ndig), l) + neg - data = Array(Uint8, sz) - i = sz - if ispow2(b) - digmask = b-1 - shift = trailing_zeros(b) - while i > neg - ch = n & digmask - data[i] = _jl_dig_syms[int(ch)+1] - n >>= shift - i -= 1 - end - else - while i > neg - ch = n % b - data[i] = _jl_dig_syms[int(ch)+1] - n = div(n,b) - i -= 1 - end - end - if neg - data[1] = '-' - end - ASCIIString(data) -end -int2str(n::Integer, b::Integer) = int2str(n, b, 0) -int2str(n::Integer, b::Integer, l::Int) = int2str(int64(n), b, l) - -string(x::Signed) = dec(int64(x)) -cstring(x::Signed) = dec(int64(x)) - -## string to float functions ## - -function float64_isvalid(s::String, out::Array{Float64,1}) - s = cstring(s) - return (ccall(:jl_strtod, Int32, (Ptr{Uint8},Ptr{Float64}), s, out)==0) -end - -function float32_isvalid(s::String, out::Array{Float32,1}) - s = cstring(s) - return (ccall(:jl_strtof, Int32, (Ptr{Uint8},Ptr{Float32}), s, out)==0) -end - -begin - local tmp::Array{Float64,1} = Array(Float64,1) - local tmpf::Array{Float32,1} = Array(Float32,1) - global float64, float32 - function float64(s::String) - if !float64_isvalid(s, tmp) - throw(ArgumentError("float64(String): invalid number format")) - end - return tmp[1] - end - - function float32(s::String) - if !float32_isvalid(s, tmpf) - throw(ArgumentError("float32(String): invalid number format")) - end - return tmpf[1] - end -end - -float(x::String) = float64(x) -parse_float(x::String) = float64(x) -parse_float(::Type{Float64}, x::String) = float64(x) -parse_float(::Type{Float32}, x::String) = float32(x) - -# copying a byte string (generally not needed due to "immutability") - -strcpy{T<:ByteString}(s::T) = T(copy(s.data)) - -# lexicographically compare byte arrays (used by Latin-1 and UTF-8) - -function lexcmp(a::Array{Uint8,1}, b::Array{Uint8,1}) - c = ccall(:memcmp, Int32, (Ptr{Uint8}, Ptr{Uint8}, Uint), - a, b, min(length(a),length(b))) - c < 0 ? -1 : c > 0 ? +1 : cmp(length(a),length(b)) -end - -# find the index of the first occurrence of a byte value in a byte array - -function memchr(a::Array{Uint8,1}, b::Integer) - p = pointer(a) - q = ccall(:memchr, Ptr{Uint8}, (Ptr{Uint8}, Int32, Uint), p, b, length(a)) - q == C_NULL ? 
0 : q - p + 1 -end - -# concatenate byte arrays into a single array - -memcat() = Array(Uint8,0) -memcat(a::Array{Uint8,1}) = copy(a) - -function memcat(arrays::Array{Uint8,1}...) - n = 0 - for a in arrays - n += length(a) - end - arr = Array(Uint8, n) - ptr = pointer(arr) - offset = 0 - for a in arrays - ccall(:memcpy, Ptr{Uint8}, (Ptr{Uint8}, Ptr{Uint8}, Uint), - ptr+offset, a, length(a)) - offset += length(a) - end - return arr -end - -# concatenate the data fields of byte strings - -memcat(s::ByteString) = memcat(s.data) -memcat(sx::ByteString...) = memcat(map(s->s.data, sx)...) diff --git a/tests/examplefiles/string_delimiters.d b/tests/examplefiles/string_delimiters.d deleted file mode 100644 index 288aacc2..00000000 --- a/tests/examplefiles/string_delimiters.d +++ /dev/null @@ -1,21 +0,0 @@ -import std.stdio; - -void main() { - // Nesting delimited strings - auto a = q"{foo " {bar} baz}"; - auto b = q"[foo [bar] " baz]"; - auto c = q"(foo " (bar) baz)"; - auto d = q" " baz>"; - // Non-nesting delimited strings - auto e = q"/foo " bar/"; - auto f = q"-Another " string-"; - // "heredoc" strings - auto g = q"FOO - This is a string! -FOO"; - // Token strings (only the q{} should be highlighted as a string) - auto h = q{ - int i; - void foo() { writefln("Hello, world!"); } - }; -} diff --git a/tests/examplefiles/stripheredoc.sh b/tests/examplefiles/stripheredoc.sh deleted file mode 100644 index 33e7ff33..00000000 --- a/tests/examplefiles/stripheredoc.sh +++ /dev/null @@ -1,3 +0,0 @@ -cat <<-EOF - Hello world $PATH - EOF diff --git a/tests/examplefiles/subr.el b/tests/examplefiles/subr.el deleted file mode 100644 index deadca6e..00000000 --- a/tests/examplefiles/subr.el +++ /dev/null @@ -1,4868 +0,0 @@ -;;; subr.el --- basic lisp subroutines for Emacs -*- coding: utf-8; lexical-binding:t -*- - -;; Copyright (C) 1985-1986, 1992, 1994-1995, 1999-2015 Free Software -;; Foundation, Inc. - -;; Maintainer: emacs-devel@gnu.org -;; Keywords: internal -;; Package: emacs - -;; This file is part of GNU Emacs. - -;; GNU Emacs is free software: you can redistribute it and/or modify -;; it under the terms of the GNU General Public License as published by -;; the Free Software Foundation, either version 3 of the License, or -;; (at your option) any later version. - -;; GNU Emacs is distributed in the hope that it will be useful, -;; but WITHOUT ANY WARRANTY; without even the implied warranty of -;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -;; GNU General Public License for more details. - -;; You should have received a copy of the GNU General Public License -;; along with GNU Emacs. If not, see . - -;;; Commentary: - -;;; Code: - -;; Beware: while this file has tag `utf-8', before it's compiled, it gets -;; loaded as "raw-text", so non-ASCII chars won't work right during bootstrap. - -(defmacro declare-function (_fn _file &optional _arglist _fileonly) - "Tell the byte-compiler that function FN is defined, in FILE. -Optional ARGLIST is the argument list used by the function. -The FILE argument is not used by the byte-compiler, but by the -`check-declare' package, which checks that FILE contains a -definition for FN. ARGLIST is used by both the byte-compiler -and `check-declare' to check for consistency. - -FILE can be either a Lisp file (in which case the \".el\" -extension is optional), or a C file. C files are expanded -relative to the Emacs \"src/\" directory. 
Lisp files are -searched for using `locate-library', and if that fails they are -expanded relative to the location of the file containing the -declaration. A FILE with an \"ext:\" prefix is an external file. -`check-declare' will check such files if they are found, and skip -them without error if they are not. - -FILEONLY non-nil means that `check-declare' will only check that -FILE exists, not that it defines FN. This is intended for -function-definitions that `check-declare' does not recognize, e.g. -`defstruct'. - -To specify a value for FILEONLY without passing an argument list, -set ARGLIST to t. This is necessary because nil means an -empty argument list, rather than an unspecified one. - -Note that for the purposes of `check-declare', this statement -must be the first non-whitespace on a line. - -For more information, see Info node `(elisp)Declaring Functions'." - ;; Does nothing - byte-compile-declare-function does the work. - nil) - - -;;;; Basic Lisp macros. - -(defalias 'not 'null) - -(defmacro noreturn (form) - "Evaluate FORM, expecting it not to return. -If FORM does return, signal an error." - (declare (debug t)) - `(prog1 ,form - (error "Form marked with `noreturn' did return"))) - -(defmacro 1value (form) - "Evaluate FORM, expecting a constant return value. -This is the global do-nothing version. There is also `testcover-1value' -that complains if FORM ever does return differing values." - (declare (debug t)) - form) - -(defmacro def-edebug-spec (symbol spec) - "Set the `edebug-form-spec' property of SYMBOL according to SPEC. -Both SYMBOL and SPEC are unevaluated. The SPEC can be: -0 (instrument no arguments); t (instrument all arguments); -a symbol (naming a function with an Edebug specification); or a list. -The elements of the list describe the argument types; see -Info node `(elisp)Specification List' for details." - `(put (quote ,symbol) 'edebug-form-spec (quote ,spec))) - -(defmacro lambda (&rest cdr) - "Return a lambda expression. -A call of the form (lambda ARGS DOCSTRING INTERACTIVE BODY) is -self-quoting; the result of evaluating the lambda expression is the -expression itself. The lambda expression may then be treated as a -function, i.e., stored as the function value of a symbol, passed to -`funcall' or `mapcar', etc. - -ARGS should take the same form as an argument list for a `defun'. -DOCSTRING is an optional documentation string. - If present, it should describe how to call the function. - But documentation strings are usually not useful in nameless functions. -INTERACTIVE should be a call to the function `interactive', which see. -It may also be omitted. -BODY should be a list of Lisp expressions. - -\(fn ARGS [DOCSTRING] [INTERACTIVE] BODY)" - (declare (doc-string 2) (indent defun) - (debug (&define lambda-list - [&optional stringp] - [&optional ("interactive" interactive)] - def-body))) - ;; Note that this definition should not use backquotes; subr.el should not - ;; depend on backquote.el. - (list 'function (cons 'lambda cdr))) - -(defmacro setq-local (var val) - "Set variable VAR to value VAL in current buffer." - ;; Can't use backquote here, it's too early in the bootstrap. - (list 'set (list 'make-local-variable (list 'quote var)) val)) - -(defmacro defvar-local (var val &optional docstring) - "Define VAR as a buffer-local variable with default value VAL. -Like `defvar' but additionally marks the variable as being automatically -buffer-local wherever it is set." 
- (declare (debug defvar) (doc-string 3)) - ;; Can't use backquote here, it's too early in the bootstrap. - (list 'progn (list 'defvar var val docstring) - (list 'make-variable-buffer-local (list 'quote var)))) - -(defun apply-partially (fun &rest args) - "Return a function that is a partial application of FUN to ARGS. -ARGS is a list of the first N arguments to pass to FUN. -The result is a new function which does the same as FUN, except that -the first N arguments are fixed at the values with which this function -was called." - (lambda (&rest args2) - (apply fun (append args args2)))) - -(defmacro push (newelt place) - "Add NEWELT to the list stored in the generalized variable PLACE. -This is morally equivalent to (setf PLACE (cons NEWELT PLACE)), -except that PLACE is only evaluated once (after NEWELT)." - (declare (debug (form gv-place))) - (if (symbolp place) - ;; Important special case, to avoid triggering GV too early in - ;; the bootstrap. - (list 'setq place - (list 'cons newelt place)) - (require 'macroexp) - (macroexp-let2 macroexp-copyable-p v newelt - (gv-letplace (getter setter) place - (funcall setter `(cons ,v ,getter)))))) - -(defmacro pop (place) - "Return the first element of PLACE's value, and remove it from the list. -PLACE must be a generalized variable whose value is a list. -If the value is nil, `pop' returns nil but does not actually -change the list." - (declare (debug (gv-place))) - ;; We use `car-safe' here instead of `car' because the behavior is the same - ;; (if it's not a cons cell, the `cdr' would have signaled an error already), - ;; but `car-safe' is total, so the byte-compiler can safely remove it if the - ;; result is not used. - `(car-safe - ,(if (symbolp place) - ;; So we can use `pop' in the bootstrap before `gv' can be used. - (list 'prog1 place (list 'setq place (list 'cdr place))) - (gv-letplace (getter setter) place - (macroexp-let2 macroexp-copyable-p x getter - `(prog1 ,x ,(funcall setter `(cdr ,x)))))))) - -(defmacro when (cond &rest body) - "If COND yields non-nil, do BODY, else return nil. -When COND yields non-nil, eval BODY forms sequentially and return -value of last one, or nil if there are none. - -\(fn COND BODY...)" - (declare (indent 1) (debug t)) - (list 'if cond (cons 'progn body))) - -(defmacro unless (cond &rest body) - "If COND yields nil, do BODY, else return nil. -When COND yields nil, eval BODY forms sequentially and return -value of last one, or nil if there are none. - -\(fn COND BODY...)" - (declare (indent 1) (debug t)) - (cons 'if (cons cond (cons nil body)))) - -(defmacro dolist (spec &rest body) - "Loop over a list. -Evaluate BODY with VAR bound to each car from LIST, in turn. -Then evaluate RESULT to get return value, default nil. - -\(fn (VAR LIST [RESULT]) BODY...)" - (declare (indent 1) (debug ((symbolp form &optional form) body))) - ;; It would be cleaner to create an uninterned symbol, - ;; but that uses a lot more space when many functions in many files - ;; use dolist. - ;; FIXME: This cost disappears in byte-compiled lexical-binding files. - (let ((temp '--dolist-tail--)) - ;; This is not a reliable test, but it does not matter because both - ;; semantics are acceptable, tho one is slightly faster with dynamic - ;; scoping and the other is slightly faster (and has cleaner semantics) - ;; with lexical scoping. 
- (if lexical-binding - `(let ((,temp ,(nth 1 spec))) - (while ,temp - (let ((,(car spec) (car ,temp))) - ,@body - (setq ,temp (cdr ,temp)))) - ,@(cdr (cdr spec))) - `(let ((,temp ,(nth 1 spec)) - ,(car spec)) - (while ,temp - (setq ,(car spec) (car ,temp)) - ,@body - (setq ,temp (cdr ,temp))) - ,@(if (cdr (cdr spec)) - `((setq ,(car spec) nil) ,@(cdr (cdr spec)))))))) - -(defmacro dotimes (spec &rest body) - "Loop a certain number of times. -Evaluate BODY with VAR bound to successive integers running from 0, -inclusive, to COUNT, exclusive. Then evaluate RESULT to get -the return value (nil if RESULT is omitted). - -\(fn (VAR COUNT [RESULT]) BODY...)" - (declare (indent 1) (debug dolist)) - ;; It would be cleaner to create an uninterned symbol, - ;; but that uses a lot more space when many functions in many files - ;; use dotimes. - ;; FIXME: This cost disappears in byte-compiled lexical-binding files. - (let ((temp '--dotimes-limit--) - (start 0) - (end (nth 1 spec))) - ;; This is not a reliable test, but it does not matter because both - ;; semantics are acceptable, tho one is slightly faster with dynamic - ;; scoping and the other has cleaner semantics. - (if lexical-binding - (let ((counter '--dotimes-counter--)) - `(let ((,temp ,end) - (,counter ,start)) - (while (< ,counter ,temp) - (let ((,(car spec) ,counter)) - ,@body) - (setq ,counter (1+ ,counter))) - ,@(if (cddr spec) - ;; FIXME: This let often leads to "unused var" warnings. - `((let ((,(car spec) ,counter)) ,@(cddr spec)))))) - `(let ((,temp ,end) - (,(car spec) ,start)) - (while (< ,(car spec) ,temp) - ,@body - (setq ,(car spec) (1+ ,(car spec)))) - ,@(cdr (cdr spec)))))) - -(defmacro declare (&rest _specs) - "Do not evaluate any arguments, and return nil. -If a `declare' form appears as the first form in the body of a -`defun' or `defmacro' form, SPECS specifies various additional -information about the function or macro; these go into effect -during the evaluation of the `defun' or `defmacro' form. - -The possible values of SPECS are specified by -`defun-declarations-alist' and `macro-declarations-alist'. - -For more information, see info node `(elisp)Declare Form'." - ;; FIXME: edebug spec should pay attention to defun-declarations-alist. - nil) - -(defmacro ignore-errors (&rest body) - "Execute BODY; if an error occurs, return nil. -Otherwise, return result of last form in BODY. -See also `with-demoted-errors' that does something similar -without silencing all errors." - (declare (debug t) (indent 0)) - `(condition-case nil (progn ,@body) (error nil))) - -;;;; Basic Lisp functions. - -(defun ignore (&rest _ignore) - "Do nothing and return nil. -This function accepts any number of arguments, but ignores them." - (interactive) - nil) - -;; Signal a compile-error if the first arg is missing. -(defun error (&rest args) - "Signal an error, making error message by passing all args to `format'. -In Emacs, the convention is that error messages start with a capital -letter but *do not* end with a period. Please follow this convention -for the sake of consistency." - (declare (advertised-calling-convention (string &rest args) "23.1")) - (signal 'error (list (apply 'format args)))) - -(defun user-error (format &rest args) - "Signal a pilot error, making error message by passing all args to `format'. -In Emacs, the convention is that error messages start with a capital -letter but *do not* end with a period. Please follow this convention -for the sake of consistency. 
-This is just like `error' except that `user-error's are expected to be the -result of an incorrect manipulation on the part of the user, rather than the -result of an actual problem." - (signal 'user-error (list (apply #'format format args)))) - -(defun define-error (name message &optional parent) - "Define NAME as a new error signal. -MESSAGE is a string that will be output to the echo area if such an error -is signaled without being caught by a `condition-case'. -PARENT is either a signal or a list of signals from which it inherits. -Defaults to `error'." - (unless parent (setq parent 'error)) - (let ((conditions - (if (consp parent) - (apply #'append - (mapcar (lambda (parent) - (cons parent - (or (get parent 'error-conditions) - (error "Unknown signal `%s'" parent)))) - parent)) - (cons parent (get parent 'error-conditions))))) - (put name 'error-conditions - (delete-dups (copy-sequence (cons name conditions)))) - (when message (put name 'error-message message)))) - -;; We put this here instead of in frame.el so that it's defined even on -;; systems where frame.el isn't loaded. -(defun frame-configuration-p (object) - "Return non-nil if OBJECT seems to be a frame configuration. -Any list whose car is `frame-configuration' is assumed to be a frame -configuration." - (and (consp object) - (eq (car object) 'frame-configuration))) - - -;;;; List functions. - -(defsubst caar (x) - "Return the car of the car of X." - (car (car x))) - -(defsubst cadr (x) - "Return the car of the cdr of X." - (car (cdr x))) - -(defsubst cdar (x) - "Return the cdr of the car of X." - (cdr (car x))) - -(defsubst cddr (x) - "Return the cdr of the cdr of X." - (cdr (cdr x))) - -(defun last (list &optional n) - "Return the last link of LIST. Its car is the last element. -If LIST is nil, return nil. -If N is non-nil, return the Nth-to-last link of LIST. -If N is bigger than the length of LIST, return LIST." - (if n - (and (>= n 0) - (let ((m (safe-length list))) - (if (< n m) (nthcdr (- m n) list) list))) - (and list - (nthcdr (1- (safe-length list)) list)))) - -(defun butlast (list &optional n) - "Return a copy of LIST with the last N elements removed. -If N is omitted or nil, the last element is removed from the -copy." - (if (and n (<= n 0)) list - (nbutlast (copy-sequence list) n))) - -(defun nbutlast (list &optional n) - "Modifies LIST to remove the last N elements. -If N is omitted or nil, remove the last element." - (let ((m (length list))) - (or n (setq n 1)) - (and (< n m) - (progn - (if (> n 0) (setcdr (nthcdr (- (1- m) n) list) nil)) - list)))) - -(defun zerop (number) - "Return t if NUMBER is zero." - ;; Used to be in C, but it's pointless since (= 0 n) is faster anyway because - ;; = has a byte-code. - (declare (compiler-macro (lambda (_) `(= 0 ,number)))) - (= 0 number)) - -(defun delete-dups (list) - "Destructively remove `equal' duplicates from LIST. -Store the result in LIST and return it. LIST must be a proper list. -Of several `equal' occurrences of an element in LIST, the first -one is kept." - (let ((tail list)) - (while tail - (setcdr tail (delete (car tail) (cdr tail))) - (setq tail (cdr tail)))) - list) - -;; See http://lists.gnu.org/archive/html/emacs-devel/2013-05/msg00204.html -(defun delete-consecutive-dups (list &optional circular) - "Destructively remove `equal' consecutive duplicates from LIST. -First and last elements are considered consecutive if CIRCULAR is -non-nil." 
- (let ((tail list) last) - (while (consp tail) - (if (equal (car tail) (cadr tail)) - (setcdr tail (cddr tail)) - (setq last (car tail) - tail (cdr tail)))) - (if (and circular - (cdr list) - (equal last (car list))) - (nbutlast list) - list))) - -(defun number-sequence (from &optional to inc) - "Return a sequence of numbers from FROM to TO (both inclusive) as a list. -INC is the increment used between numbers in the sequence and defaults to 1. -So, the Nth element of the list is (+ FROM (* N INC)) where N counts from -zero. TO is only included if there is an N for which TO = FROM + N * INC. -If TO is nil or numerically equal to FROM, return (FROM). -If INC is positive and TO is less than FROM, or INC is negative -and TO is larger than FROM, return nil. -If INC is zero and TO is neither nil nor numerically equal to -FROM, signal an error. - -This function is primarily designed for integer arguments. -Nevertheless, FROM, TO and INC can be integer or float. However, -floating point arithmetic is inexact. For instance, depending on -the machine, it may quite well happen that -\(number-sequence 0.4 0.6 0.2) returns the one element list (0.4), -whereas (number-sequence 0.4 0.8 0.2) returns a list with three -elements. Thus, if some of the arguments are floats and one wants -to make sure that TO is included, one may have to explicitly write -TO as (+ FROM (* N INC)) or use a variable whose value was -computed with this exact expression. Alternatively, you can, -of course, also replace TO with a slightly larger value -\(or a slightly more negative value if INC is negative)." - (if (or (not to) (= from to)) - (list from) - (or inc (setq inc 1)) - (when (zerop inc) (error "The increment can not be zero")) - (let (seq (n 0) (next from)) - (if (> inc 0) - (while (<= next to) - (setq seq (cons next seq) - n (1+ n) - next (+ from (* n inc)))) - (while (>= next to) - (setq seq (cons next seq) - n (1+ n) - next (+ from (* n inc))))) - (nreverse seq)))) - -(defun copy-tree (tree &optional vecp) - "Make a copy of TREE. -If TREE is a cons cell, this recursively copies both its car and its cdr. -Contrast to `copy-sequence', which copies only along the cdrs. With second -argument VECP, this copies vectors as well as conses." - (if (consp tree) - (let (result) - (while (consp tree) - (let ((newcar (car tree))) - (if (or (consp (car tree)) (and vecp (vectorp (car tree)))) - (setq newcar (copy-tree (car tree) vecp))) - (push newcar result)) - (setq tree (cdr tree))) - (nconc (nreverse result) tree)) - (if (and vecp (vectorp tree)) - (let ((i (length (setq tree (copy-sequence tree))))) - (while (>= (setq i (1- i)) 0) - (aset tree i (copy-tree (aref tree i) vecp))) - tree) - tree))) - -;;;; Various list-search functions. - -(defun assoc-default (key alist &optional test default) - "Find object KEY in a pseudo-alist ALIST. -ALIST is a list of conses or objects. Each element - (or the element's car, if it is a cons) is compared with KEY by - calling TEST, with two arguments: (i) the element or its car, - and (ii) KEY. -If that is non-nil, the element matches; then `assoc-default' - returns the element's cdr, if it is a cons, or DEFAULT if the - element is not a cons. - -If no element matches, the value is nil. -If TEST is omitted or nil, `equal' is used." 
- (let (found (tail alist) value) - (while (and tail (not found)) - (let ((elt (car tail))) - (when (funcall (or test 'equal) (if (consp elt) (car elt) elt) key) - (setq found t value (if (consp elt) (cdr elt) default)))) - (setq tail (cdr tail))) - value)) - -(defun assoc-ignore-case (key alist) - "Like `assoc', but ignores differences in case and text representation. -KEY must be a string. Upper-case and lower-case letters are treated as equal. -Unibyte strings are converted to multibyte for comparison." - (declare (obsolete assoc-string "22.1")) - (assoc-string key alist t)) - -(defun assoc-ignore-representation (key alist) - "Like `assoc', but ignores differences in text representation. -KEY must be a string. -Unibyte strings are converted to multibyte for comparison." - (declare (obsolete assoc-string "22.1")) - (assoc-string key alist nil)) - -(defun member-ignore-case (elt list) - "Like `member', but ignore differences in case and text representation. -ELT must be a string. Upper-case and lower-case letters are treated as equal. -Unibyte strings are converted to multibyte for comparison. -Non-strings in LIST are ignored." - (while (and list - (not (and (stringp (car list)) - (eq t (compare-strings elt 0 nil (car list) 0 nil t))))) - (setq list (cdr list))) - list) - -(defun assq-delete-all (key alist) - "Delete from ALIST all elements whose car is `eq' to KEY. -Return the modified alist. -Elements of ALIST that are not conses are ignored." - (while (and (consp (car alist)) - (eq (car (car alist)) key)) - (setq alist (cdr alist))) - (let ((tail alist) tail-cdr) - (while (setq tail-cdr (cdr tail)) - (if (and (consp (car tail-cdr)) - (eq (car (car tail-cdr)) key)) - (setcdr tail (cdr tail-cdr)) - (setq tail tail-cdr)))) - alist) - -(defun rassq-delete-all (value alist) - "Delete from ALIST all elements whose cdr is `eq' to VALUE. -Return the modified alist. -Elements of ALIST that are not conses are ignored." - (while (and (consp (car alist)) - (eq (cdr (car alist)) value)) - (setq alist (cdr alist))) - (let ((tail alist) tail-cdr) - (while (setq tail-cdr (cdr tail)) - (if (and (consp (car tail-cdr)) - (eq (cdr (car tail-cdr)) value)) - (setcdr tail (cdr tail-cdr)) - (setq tail tail-cdr)))) - alist) - -(defun alist-get (key alist &optional default remove) - "Get the value associated to KEY in ALIST. -DEFAULT is the value to return if KEY is not found in ALIST. -REMOVE, if non-nil, means that when setting this element, we should -remove the entry if the new value is `eql' to DEFAULT." - (ignore remove) ;;Silence byte-compiler. - (let ((x (assq key alist))) - (if x (cdr x) default))) - -(defun remove (elt seq) - "Return a copy of SEQ with all occurrences of ELT removed. -SEQ must be a list, vector, or string. The comparison is done with `equal'." - (if (nlistp seq) - ;; If SEQ isn't a list, there's no need to copy SEQ because - ;; `delete' will return a new object. - (delete elt seq) - (delete elt (copy-sequence seq)))) - -(defun remq (elt list) - "Return LIST with all occurrences of ELT removed. -The comparison is done with `eq'. Contrary to `delq', this does not use -side-effects, and the argument LIST is not modified." - (while (and (eq elt (car list)) (setq list (cdr list)))) - (if (memq elt list) - (delq elt (copy-sequence list)) - list)) - -;;;; Keymap support. - -(defun kbd (keys) - "Convert KEYS to the internal Emacs key representation. -KEYS should be a string constant in the format used for -saving keyboard macros (see `edmacro-mode')." 
- ;; Don't use a defalias, since the `pure' property is only true for - ;; the calling convention of `kbd'. - (read-kbd-macro keys)) -(put 'kbd 'pure t) - -(defun undefined () - "Beep to tell the user this binding is undefined." - (interactive) - (ding) - (message "%s is undefined" (key-description (this-single-command-keys))) - (setq defining-kbd-macro nil) - (force-mode-line-update) - ;; If this is a down-mouse event, don't reset prefix-arg; - ;; pass it to the command run by the up event. - (setq prefix-arg - (when (memq 'down (event-modifiers last-command-event)) - current-prefix-arg))) - -;; Prevent the \{...} documentation construct -;; from mentioning keys that run this command. -(put 'undefined 'suppress-keymap t) - -(defun suppress-keymap (map &optional nodigits) - "Make MAP override all normally self-inserting keys to be undefined. -Normally, as an exception, digits and minus-sign are set to make prefix args, -but optional second arg NODIGITS non-nil treats them like other chars." - (define-key map [remap self-insert-command] 'undefined) - (or nodigits - (let (loop) - (define-key map "-" 'negative-argument) - ;; Make plain numbers do numeric args. - (setq loop ?0) - (while (<= loop ?9) - (define-key map (char-to-string loop) 'digit-argument) - (setq loop (1+ loop)))))) - -(defun make-composed-keymap (maps &optional parent) - "Construct a new keymap composed of MAPS and inheriting from PARENT. -When looking up a key in the returned map, the key is looked in each -keymap of MAPS in turn until a binding is found. -If no binding is found in MAPS, the lookup continues in PARENT, if non-nil. -As always with keymap inheritance, a nil binding in MAPS overrides -any corresponding binding in PARENT, but it does not override corresponding -bindings in other keymaps of MAPS. -MAPS can be a list of keymaps or a single keymap. -PARENT if non-nil should be a keymap." - `(keymap - ,@(if (keymapp maps) (list maps) maps) - ,@parent)) - -(defun define-key-after (keymap key definition &optional after) - "Add binding in KEYMAP for KEY => DEFINITION, right after AFTER's binding. -This is like `define-key' except that the binding for KEY is placed -just after the binding for the event AFTER, instead of at the beginning -of the map. Note that AFTER must be an event type (like KEY), NOT a command -\(like DEFINITION). - -If AFTER is t or omitted, the new binding goes at the end of the keymap. -AFTER should be a single event type--a symbol or a character, not a sequence. - -Bindings are always added before any inherited map. - -The order of bindings in a keymap only matters when it is used as -a menu, so this function is not useful for non-menu keymaps." - (unless after (setq after t)) - (or (keymapp keymap) - (signal 'wrong-type-argument (list 'keymapp keymap))) - (setq key - (if (<= (length key) 1) (aref key 0) - (setq keymap (lookup-key keymap - (apply 'vector - (butlast (mapcar 'identity key))))) - (aref key (1- (length key))))) - (let ((tail keymap) done inserted) - (while (and (not done) tail) - ;; Delete any earlier bindings for the same key. - (if (eq (car-safe (car (cdr tail))) key) - (setcdr tail (cdr (cdr tail)))) - ;; If we hit an included map, go down that one. - (if (keymapp (car tail)) (setq tail (car tail))) - ;; When we reach AFTER's binding, insert the new binding after. - ;; If we reach an inherited keymap, insert just before that. - ;; If we reach the end of this keymap, insert at the end. 
- (if (or (and (eq (car-safe (car tail)) after) - (not (eq after t))) - (eq (car (cdr tail)) 'keymap) - (null (cdr tail))) - (progn - ;; Stop the scan only if we find a parent keymap. - ;; Keep going past the inserted element - ;; so we can delete any duplications that come later. - (if (eq (car (cdr tail)) 'keymap) - (setq done t)) - ;; Don't insert more than once. - (or inserted - (setcdr tail (cons (cons key definition) (cdr tail)))) - (setq inserted t))) - (setq tail (cdr tail))))) - -(defun map-keymap-sorted (function keymap) - "Implement `map-keymap' with sorting. -Don't call this function; it is for internal use only." - (let (list) - (map-keymap (lambda (a b) (push (cons a b) list)) - keymap) - (setq list (sort list - (lambda (a b) - (setq a (car a) b (car b)) - (if (integerp a) - (if (integerp b) (< a b) - t) - (if (integerp b) t - ;; string< also accepts symbols. - (string< a b)))))) - (dolist (p list) - (funcall function (car p) (cdr p))))) - -(defun keymap--menu-item-binding (val) - "Return the binding part of a menu-item." - (cond - ((not (consp val)) val) ;Not a menu-item. - ((eq 'menu-item (car val)) - (let* ((binding (nth 2 val)) - (plist (nthcdr 3 val)) - (filter (plist-get plist :filter))) - (if filter (funcall filter binding) - binding))) - ((and (consp (cdr val)) (stringp (cadr val))) - (cddr val)) - ((stringp (car val)) - (cdr val)) - (t val))) ;Not a menu-item either. - -(defun keymap--menu-item-with-binding (item binding) - "Build a menu-item like ITEM but with its binding changed to BINDING." - (cond - ((not (consp item)) binding) ;Not a menu-item. - ((eq 'menu-item (car item)) - (setq item (copy-sequence item)) - (let ((tail (nthcdr 2 item))) - (setcar tail binding) - ;; Remove any potential filter. - (if (plist-get (cdr tail) :filter) - (setcdr tail (plist-put (cdr tail) :filter nil)))) - item) - ((and (consp (cdr item)) (stringp (cadr item))) - (cons (car item) (cons (cadr item) binding))) - (t (cons (car item) binding)))) - -(defun keymap--merge-bindings (val1 val2) - "Merge bindings VAL1 and VAL2." - (let ((map1 (keymap--menu-item-binding val1)) - (map2 (keymap--menu-item-binding val2))) - (if (not (and (keymapp map1) (keymapp map2))) - ;; There's nothing to merge: val1 takes precedence. - val1 - (let ((map (list 'keymap map1 map2)) - (item (if (keymapp val1) (if (keymapp val2) nil val2) val1))) - (keymap--menu-item-with-binding item map))))) - -(defun keymap-canonicalize (map) - "Return a simpler equivalent keymap. -This resolves inheritance and redefinitions. The returned keymap -should behave identically to a copy of KEYMAP w.r.t `lookup-key' -and use in active keymaps and menus. -Subkeymaps may be modified but are not canonicalized." - ;; FIXME: Problem with the difference between a nil binding - ;; that hides a binding in an inherited map and a nil binding that's ignored - ;; to let some further binding visible. Currently a nil binding hides all. - ;; FIXME: we may want to carefully (re)order elements in case they're - ;; menu-entries. - (let ((bindings ()) - (ranges ()) - (prompt (keymap-prompt map))) - (while (keymapp map) - (setq map (map-keymap ;; -internal - (lambda (key item) - (if (consp key) - ;; Treat char-ranges specially. - (push (cons key item) ranges) - (push (cons key item) bindings))) - map))) - ;; Create the new map. - (setq map (funcall (if ranges 'make-keymap 'make-sparse-keymap) prompt)) - (dolist (binding ranges) - ;; Treat char-ranges specially. FIXME: need to merge as well. 
- (define-key map (vector (car binding)) (cdr binding))) - ;; Process the bindings starting from the end. - (dolist (binding (prog1 bindings (setq bindings ()))) - (let* ((key (car binding)) - (oldbind (assq key bindings))) - (push (if (not oldbind) - ;; The normal case: no duplicate bindings. - binding - ;; This is the second binding for this key. - (setq bindings (delq oldbind bindings)) - (cons key (keymap--merge-bindings (cdr binding) - (cdr oldbind)))) - bindings))) - (nconc map bindings))) - -(put 'keyboard-translate-table 'char-table-extra-slots 0) - -(defun keyboard-translate (from to) - "Translate character FROM to TO on the current terminal. -This function creates a `keyboard-translate-table' if necessary -and then modifies one entry in it." - (or (char-table-p keyboard-translate-table) - (setq keyboard-translate-table - (make-char-table 'keyboard-translate-table nil))) - (aset keyboard-translate-table from to)) - -;;;; Key binding commands. - -(defun global-set-key (key command) - "Give KEY a global binding as COMMAND. -COMMAND is the command definition to use; usually it is -a symbol naming an interactively-callable function. -KEY is a key sequence; noninteractively, it is a string or vector -of characters or event types, and non-ASCII characters with codes -above 127 (such as ISO Latin-1) can be included if you use a vector. - -Note that if KEY has a local binding in the current buffer, -that local binding will continue to shadow any global binding -that you make with this function." - (interactive "KSet key globally: \nCSet key %s to command: ") - (or (vectorp key) (stringp key) - (signal 'wrong-type-argument (list 'arrayp key))) - (define-key (current-global-map) key command)) - -(defun local-set-key (key command) - "Give KEY a local binding as COMMAND. -COMMAND is the command definition to use; usually it is -a symbol naming an interactively-callable function. -KEY is a key sequence; noninteractively, it is a string or vector -of characters or event types, and non-ASCII characters with codes -above 127 (such as ISO Latin-1) can be included if you use a vector. - -The binding goes in the current buffer's local map, which in most -cases is shared with all other buffers in the same major mode." - (interactive "KSet key locally: \nCSet key %s locally to command: ") - (let ((map (current-local-map))) - (or map - (use-local-map (setq map (make-sparse-keymap)))) - (or (vectorp key) (stringp key) - (signal 'wrong-type-argument (list 'arrayp key))) - (define-key map key command))) - -(defun global-unset-key (key) - "Remove global binding of KEY. -KEY is a string or vector representing a sequence of keystrokes." - (interactive "kUnset key globally: ") - (global-set-key key nil)) - -(defun local-unset-key (key) - "Remove local binding of KEY. -KEY is a string or vector representing a sequence of keystrokes." - (interactive "kUnset key locally: ") - (if (current-local-map) - (local-set-key key nil)) - nil) - -;;;; substitute-key-definition and its subroutines. - -(defvar key-substitution-in-progress nil - "Used internally by `substitute-key-definition'.") - -(defun substitute-key-definition (olddef newdef keymap &optional oldmap prefix) - "Replace OLDDEF with NEWDEF for any keys in KEYMAP now defined as OLDDEF. -In other words, OLDDEF is replaced with NEWDEF where ever it appears. -Alternatively, if optional fourth argument OLDMAP is specified, we redefine -in KEYMAP as NEWDEF those keys which are defined as OLDDEF in OLDMAP. 
- -If you don't specify OLDMAP, you can usually get the same results -in a cleaner way with command remapping, like this: - (define-key KEYMAP [remap OLDDEF] NEWDEF) -\n(fn OLDDEF NEWDEF KEYMAP &optional OLDMAP)" - ;; Don't document PREFIX in the doc string because we don't want to - ;; advertise it. It's meant for recursive calls only. Here's its - ;; meaning - - ;; If optional argument PREFIX is specified, it should be a key - ;; prefix, a string. Redefined bindings will then be bound to the - ;; original key, with PREFIX added at the front. - (or prefix (setq prefix "")) - (let* ((scan (or oldmap keymap)) - (prefix1 (vconcat prefix [nil])) - (key-substitution-in-progress - (cons scan key-substitution-in-progress))) - ;; Scan OLDMAP, finding each char or event-symbol that - ;; has any definition, and act on it with hack-key. - (map-keymap - (lambda (char defn) - (aset prefix1 (length prefix) char) - (substitute-key-definition-key defn olddef newdef prefix1 keymap)) - scan))) - -(defun substitute-key-definition-key (defn olddef newdef prefix keymap) - (let (inner-def skipped menu-item) - ;; Find the actual command name within the binding. - (if (eq (car-safe defn) 'menu-item) - (setq menu-item defn defn (nth 2 defn)) - ;; Skip past menu-prompt. - (while (stringp (car-safe defn)) - (push (pop defn) skipped)) - ;; Skip past cached key-equivalence data for menu items. - (if (consp (car-safe defn)) - (setq defn (cdr defn)))) - (if (or (eq defn olddef) - ;; Compare with equal if definition is a key sequence. - ;; That is useful for operating on function-key-map. - (and (or (stringp defn) (vectorp defn)) - (equal defn olddef))) - (define-key keymap prefix - (if menu-item - (let ((copy (copy-sequence menu-item))) - (setcar (nthcdr 2 copy) newdef) - copy) - (nconc (nreverse skipped) newdef))) - ;; Look past a symbol that names a keymap. - (setq inner-def - (or (indirect-function defn t) defn)) - ;; For nested keymaps, we use `inner-def' rather than `defn' so as to - ;; avoid autoloading a keymap. This is mostly done to preserve the - ;; original non-autoloading behavior of pre-map-keymap times. - (if (and (keymapp inner-def) - ;; Avoid recursively scanning - ;; where KEYMAP does not have a submap. - (let ((elt (lookup-key keymap prefix))) - (or (null elt) (natnump elt) (keymapp elt))) - ;; Avoid recursively rescanning keymap being scanned. - (not (memq inner-def key-substitution-in-progress))) - ;; If this one isn't being scanned already, scan it now. - (substitute-key-definition olddef newdef keymap inner-def prefix))))) - - -;;;; The global keymap tree. - -;; global-map, esc-map, and ctl-x-map have their values set up in -;; keymap.c; we just give them docstrings here. - -(defvar global-map nil - "Default global keymap mapping Emacs keyboard input into commands. -The value is a keymap which is usually (but not necessarily) Emacs's -global map.") - -(defvar esc-map nil - "Default keymap for ESC (meta) commands. -The normal global definition of the character ESC indirects to this keymap.") - -(defvar ctl-x-map nil - "Default keymap for C-x commands. -The normal global definition of the character C-x indirects to this keymap.") - -(defvar ctl-x-4-map (make-sparse-keymap) - "Keymap for subcommands of C-x 4.") -(defalias 'ctl-x-4-prefix ctl-x-4-map) -(define-key ctl-x-map "4" 'ctl-x-4-prefix) - -(defvar ctl-x-5-map (make-sparse-keymap) - "Keymap for frame commands.") -(defalias 'ctl-x-5-prefix ctl-x-5-map) -(define-key ctl-x-map "5" 'ctl-x-5-prefix) - - -;;;; Event manipulation functions. 
- -(defconst listify-key-sequence-1 (logior 128 ?\M-\C-@)) - -(defun listify-key-sequence (key) - "Convert a key sequence to a list of events." - (if (vectorp key) - (append key nil) - (mapcar (function (lambda (c) - (if (> c 127) - (logxor c listify-key-sequence-1) - c))) - key))) - -(defun eventp (obj) - "True if the argument is an event object." - (when obj - (or (integerp obj) - (and (symbolp obj) obj (not (keywordp obj))) - (and (consp obj) (symbolp (car obj)))))) - -(defun event-modifiers (event) - "Return a list of symbols representing the modifier keys in event EVENT. -The elements of the list may include `meta', `control', -`shift', `hyper', `super', `alt', `click', `double', `triple', `drag', -and `down'. -EVENT may be an event or an event type. If EVENT is a symbol -that has never been used in an event that has been read as input -in the current Emacs session, then this function may fail to include -the `click' modifier." - (let ((type event)) - (if (listp type) - (setq type (car type))) - (if (symbolp type) - ;; Don't read event-symbol-elements directly since we're not - ;; sure the symbol has already been parsed. - (cdr (internal-event-symbol-parse-modifiers type)) - (let ((list nil) - (char (logand type (lognot (logior ?\M-\^@ ?\C-\^@ ?\S-\^@ - ?\H-\^@ ?\s-\^@ ?\A-\^@))))) - (if (not (zerop (logand type ?\M-\^@))) - (push 'meta list)) - (if (or (not (zerop (logand type ?\C-\^@))) - (< char 32)) - (push 'control list)) - (if (or (not (zerop (logand type ?\S-\^@))) - (/= char (downcase char))) - (push 'shift list)) - (or (zerop (logand type ?\H-\^@)) - (push 'hyper list)) - (or (zerop (logand type ?\s-\^@)) - (push 'super list)) - (or (zerop (logand type ?\A-\^@)) - (push 'alt list)) - list)))) - -(defun event-basic-type (event) - "Return the basic type of the given event (all modifiers removed). -The value is a printing character (not upper case) or a symbol. -EVENT may be an event or an event type. If EVENT is a symbol -that has never been used in an event that has been read as input -in the current Emacs session, then this function may return nil." - (if (consp event) - (setq event (car event))) - (if (symbolp event) - (car (get event 'event-symbol-elements)) - (let* ((base (logand event (1- ?\A-\^@))) - (uncontrolled (if (< base 32) (logior base 64) base))) - ;; There are some numbers that are invalid characters and - ;; cause `downcase' to get an error. - (condition-case () - (downcase uncontrolled) - (error uncontrolled))))) - -(defsubst mouse-movement-p (object) - "Return non-nil if OBJECT is a mouse movement event." - (eq (car-safe object) 'mouse-movement)) - -(defun mouse-event-p (object) - "Return non-nil if OBJECT is a mouse click event." - ;; is this really correct? maybe remove mouse-movement? - (memq (event-basic-type object) '(mouse-1 mouse-2 mouse-3 mouse-movement))) - -(defun event-start (event) - "Return the starting position of EVENT. -EVENT should be a mouse click, drag, or key press event. If -EVENT is nil, the value of `posn-at-point' is used instead. - -The following accessor functions are used to access the elements -of the position: - -`posn-window': The window the event is in. -`posn-area': A symbol identifying the area the event occurred in, -or nil if the event occurred in the text area. -`posn-point': The buffer position of the event. -`posn-x-y': The pixel-based coordinates of the event. -`posn-col-row': The estimated column and row corresponding to the -position of the event. 
-`posn-actual-col-row': The actual column and row corresponding to the -position of the event. -`posn-string': The string object of the event, which is either -nil or (STRING . POSITION)'. -`posn-image': The image object of the event, if any. -`posn-object': The image or string object of the event, if any. -`posn-timestamp': The time the event occurred, in milliseconds. - -For more information, see Info node `(elisp)Click Events'." - (if (consp event) (nth 1 event) - (or (posn-at-point) - (list (selected-window) (point) '(0 . 0) 0)))) - -(defun event-end (event) - "Return the ending position of EVENT. -EVENT should be a click, drag, or key press event. - -See `event-start' for a description of the value returned." - (if (consp event) (nth (if (consp (nth 2 event)) 2 1) event) - (or (posn-at-point) - (list (selected-window) (point) '(0 . 0) 0)))) - -(defsubst event-click-count (event) - "Return the multi-click count of EVENT, a click or drag event. -The return value is a positive integer." - (if (and (consp event) (integerp (nth 2 event))) (nth 2 event) 1)) - -;;;; Extracting fields of the positions in an event. - -(defun posnp (obj) - "Return non-nil if OBJ appears to be a valid `posn' object specifying a window. -If OBJ is a valid `posn' object, but specifies a frame rather -than a window, return nil." - ;; FIXME: Correct the behavior of this function so that all valid - ;; `posn' objects are recognized, after updating other code that - ;; depends on its present behavior. - (and (windowp (car-safe obj)) - (atom (car-safe (setq obj (cdr obj)))) ;AREA-OR-POS. - (integerp (car-safe (car-safe (setq obj (cdr obj))))) ;XOFFSET. - (integerp (car-safe (cdr obj))))) ;TIMESTAMP. - -(defsubst posn-window (position) - "Return the window in POSITION. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (nth 0 position)) - -(defsubst posn-area (position) - "Return the window area recorded in POSITION, or nil for the text area. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (let ((area (if (consp (nth 1 position)) - (car (nth 1 position)) - (nth 1 position)))) - (and (symbolp area) area))) - -(defun posn-point (position) - "Return the buffer location in POSITION. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions. -Returns nil if POSITION does not correspond to any buffer location (e.g. -a click on a scroll bar)." - (or (nth 5 position) - (let ((pt (nth 1 position))) - (or (car-safe pt) - ;; Apparently this can also be `vertical-scroll-bar' (bug#13979). - (if (integerp pt) pt))))) - -(defun posn-set-point (position) - "Move point to POSITION. -Select the corresponding window as well." - (if (not (windowp (posn-window position))) - (error "Position not in text area of window")) - (select-window (posn-window position)) - (if (numberp (posn-point position)) - (goto-char (posn-point position)))) - -(defsubst posn-x-y (position) - "Return the x and y coordinates in POSITION. -The return value has the form (X . Y), where X and Y are given in -pixels. POSITION should be a list of the form returned by -`event-start' and `event-end'." - (nth 2 position)) - -(declare-function scroll-bar-scale "scroll-bar" (num-denom whole)) - -(defun posn-col-row (position) - "Return the nominal column and row in POSITION, measured in characters. 
-The column and row values are approximations calculated from the x -and y coordinates in POSITION and the frame's default character width -and default line height, including spacing. -For a scroll-bar event, the result column is 0, and the row -corresponds to the vertical position of the click in the scroll bar. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (let* ((pair (posn-x-y position)) - (frame-or-window (posn-window position)) - (frame (if (framep frame-or-window) - frame-or-window - (window-frame frame-or-window))) - (window (when (windowp frame-or-window) frame-or-window)) - (area (posn-area position))) - (cond - ((null frame-or-window) - '(0 . 0)) - ((eq area 'vertical-scroll-bar) - (cons 0 (scroll-bar-scale pair (1- (window-height window))))) - ((eq area 'horizontal-scroll-bar) - (cons (scroll-bar-scale pair (window-width window)) 0)) - (t - ;; FIXME: This should take line-spacing properties on - ;; newlines into account. - (let* ((spacing (when (display-graphic-p frame) - (or (with-current-buffer - (window-buffer (frame-selected-window frame)) - line-spacing) - (frame-parameter frame 'line-spacing))))) - (cond ((floatp spacing) - (setq spacing (truncate (* spacing - (frame-char-height frame))))) - ((null spacing) - (setq spacing 0))) - (cons (/ (car pair) (frame-char-width frame)) - (/ (cdr pair) (+ (frame-char-height frame) spacing)))))))) - -(defun posn-actual-col-row (position) - "Return the window row number in POSITION and character number in that row. - -Return nil if POSITION does not contain the actual position; in that case -\`posn-col-row' can be used to get approximate values. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions. - -This function does not account for the width on display, like the -number of visual columns taken by a TAB or image. If you need -the coordinates of POSITION in character units, you should use -\`posn-col-row', not this function." - (nth 6 position)) - -(defsubst posn-timestamp (position) - "Return the timestamp of POSITION. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (nth 3 position)) - -(defun posn-string (position) - "Return the string object of POSITION. -Value is a cons (STRING . STRING-POS), or nil if not a string. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (let ((x (nth 4 position))) - ;; Apparently this can also be `handle' or `below-handle' (bug#13979). - (when (consp x) x))) - -(defsubst posn-image (position) - "Return the image object of POSITION. -Value is a list (image ...), or nil if not an image. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (nth 7 position)) - -(defsubst posn-object (position) - "Return the object (image or string) of POSITION. -Value is a list (image ...) for an image object, a cons cell -\(STRING . STRING-POS) for a string object, and nil for a buffer position. -POSITION should be a list of the form returned by the `event-start' -and `event-end' functions." - (or (posn-image position) (posn-string position))) - -(defsubst posn-object-x-y (position) - "Return the x and y coordinates relative to the object of POSITION. -The return value has the form (DX . DY), where DX and DY are -given in pixels. POSITION should be a list of the form returned -by `event-start' and `event-end'." 
- (nth 8 position)) - -(defsubst posn-object-width-height (position) - "Return the pixel width and height of the object of POSITION. -The return value has the form (WIDTH . HEIGHT). POSITION should -be a list of the form returned by `event-start' and `event-end'." - (nth 9 position)) - - -;;;; Obsolescent names for functions. - -(define-obsolete-function-alias 'window-dot 'window-point "22.1") -(define-obsolete-function-alias 'set-window-dot 'set-window-point "22.1") -(define-obsolete-function-alias 'read-input 'read-string "22.1") -(define-obsolete-function-alias 'show-buffer 'set-window-buffer "22.1") -(define-obsolete-function-alias 'eval-current-buffer 'eval-buffer "22.1") -(define-obsolete-function-alias 'string-to-int 'string-to-number "22.1") - -(make-obsolete 'forward-point "use (+ (point) N) instead." "23.1") -(make-obsolete 'buffer-has-markers-at nil "24.3") - -(defun insert-string (&rest args) - "Mocklisp-compatibility insert function. -Like the function `insert' except that any argument that is a number -is converted into a string by expressing it in decimal." - (declare (obsolete insert "22.1")) - (dolist (el args) - (insert (if (integerp el) (number-to-string el) el)))) - -(defun makehash (&optional test) - (declare (obsolete make-hash-table "22.1")) - (make-hash-table :test (or test 'eql))) - -(defun log10 (x) - "Return (log X 10), the log base 10 of X." - (declare (obsolete log "24.4")) - (log x 10)) - -;; These are used by VM and some old programs -(defalias 'focus-frame 'ignore "") -(make-obsolete 'focus-frame "it does nothing." "22.1") -(defalias 'unfocus-frame 'ignore "") -(make-obsolete 'unfocus-frame "it does nothing." "22.1") -(make-obsolete 'make-variable-frame-local - "explicitly check for a frame-parameter instead." "22.2") -(set-advertised-calling-convention - 'all-completions '(string collection &optional predicate) "23.1") -(set-advertised-calling-convention 'unintern '(name obarray) "23.3") -(set-advertised-calling-convention 'indirect-function '(object) "25.1") -(set-advertised-calling-convention 'redirect-frame-focus '(frame focus-frame) "24.3") -(set-advertised-calling-convention 'decode-char '(ch charset) "21.4") -(set-advertised-calling-convention 'encode-char '(ch charset) "21.4") - -;;;; Obsolescence declarations for variables, and aliases. - -;; Special "default-FOO" variables which contain the default value of -;; the "FOO" variable are nasty. Their implementation is brittle, and -;; slows down several unrelated variable operations; furthermore, they -;; can lead to really odd behavior if you decide to make them -;; buffer-local. 
- -;; Not used at all in Emacs, last time I checked: -(make-obsolete-variable 'default-mode-line-format 'mode-line-format "23.2") -(make-obsolete-variable 'default-header-line-format 'header-line-format "23.2") -(make-obsolete-variable 'default-line-spacing 'line-spacing "23.2") -(make-obsolete-variable 'default-abbrev-mode 'abbrev-mode "23.2") -(make-obsolete-variable 'default-ctl-arrow 'ctl-arrow "23.2") -(make-obsolete-variable 'default-truncate-lines 'truncate-lines "23.2") -(make-obsolete-variable 'default-left-margin 'left-margin "23.2") -(make-obsolete-variable 'default-tab-width 'tab-width "23.2") -(make-obsolete-variable 'default-case-fold-search 'case-fold-search "23.2") -(make-obsolete-variable 'default-left-margin-width 'left-margin-width "23.2") -(make-obsolete-variable 'default-right-margin-width 'right-margin-width "23.2") -(make-obsolete-variable 'default-left-fringe-width 'left-fringe-width "23.2") -(make-obsolete-variable 'default-right-fringe-width 'right-fringe-width "23.2") -(make-obsolete-variable 'default-fringes-outside-margins 'fringes-outside-margins "23.2") -(make-obsolete-variable 'default-scroll-bar-width 'scroll-bar-width "23.2") -(make-obsolete-variable 'default-vertical-scroll-bar 'vertical-scroll-bar "23.2") -(make-obsolete-variable 'default-indicate-empty-lines 'indicate-empty-lines "23.2") -(make-obsolete-variable 'default-indicate-buffer-boundaries 'indicate-buffer-boundaries "23.2") -(make-obsolete-variable 'default-fringe-indicator-alist 'fringe-indicator-alist "23.2") -(make-obsolete-variable 'default-fringe-cursor-alist 'fringe-cursor-alist "23.2") -(make-obsolete-variable 'default-scroll-up-aggressively 'scroll-up-aggressively "23.2") -(make-obsolete-variable 'default-scroll-down-aggressively 'scroll-down-aggressively "23.2") -(make-obsolete-variable 'default-fill-column 'fill-column "23.2") -(make-obsolete-variable 'default-cursor-type 'cursor-type "23.2") -(make-obsolete-variable 'default-cursor-in-non-selected-windows 'cursor-in-non-selected-windows "23.2") -(make-obsolete-variable 'default-buffer-file-coding-system 'buffer-file-coding-system "23.2") -(make-obsolete-variable 'default-major-mode 'major-mode "23.2") -(make-obsolete-variable 'default-enable-multibyte-characters - "use enable-multibyte-characters or set-buffer-multibyte instead" "23.2") - -(make-obsolete-variable 'define-key-rebound-commands nil "23.2") -(make-obsolete-variable 'redisplay-end-trigger-functions 'jit-lock-register "23.1") -(make-obsolete-variable 'deferred-action-list 'post-command-hook "24.1") -(make-obsolete-variable 'deferred-action-function 'post-command-hook "24.1") -(make-obsolete-variable 'redisplay-dont-pause nil "24.5") -(make-obsolete 'window-redisplay-end-trigger nil "23.1") -(make-obsolete 'set-window-redisplay-end-trigger nil "23.1") - -(make-obsolete 'process-filter-multibyte-p nil "23.1") -(make-obsolete 'set-process-filter-multibyte nil "23.1") - -;; Lisp manual only updated in 22.1. -(define-obsolete-variable-alias 'executing-macro 'executing-kbd-macro - "before 19.34") - -(define-obsolete-variable-alias 'x-lost-selection-hooks - 'x-lost-selection-functions "22.1") -(define-obsolete-variable-alias 'x-sent-selection-hooks - 'x-sent-selection-functions "22.1") - -;; This was introduced in 21.4 for pre-unicode unification. That -;; usage was rendered obsolete in 23.1 which uses Unicode internally. -;; Other uses are possible, so this variable is not _really_ obsolete, -;; but Stefan insists to mark it so. 
-(make-obsolete-variable 'translation-table-for-input nil "23.1") - -(defvaralias 'messages-buffer-max-lines 'message-log-max) - -;;;; Alternate names for functions - these are not being phased out. - -(defalias 'send-string 'process-send-string) -(defalias 'send-region 'process-send-region) -(defalias 'string= 'string-equal) -(defalias 'string< 'string-lessp) -(defalias 'move-marker 'set-marker) -(defalias 'rplaca 'setcar) -(defalias 'rplacd 'setcdr) -(defalias 'beep 'ding) ;preserve lingual purity -(defalias 'indent-to-column 'indent-to) -(defalias 'backward-delete-char 'delete-backward-char) -(defalias 'search-forward-regexp (symbol-function 're-search-forward)) -(defalias 'search-backward-regexp (symbol-function 're-search-backward)) -(defalias 'int-to-string 'number-to-string) -(defalias 'store-match-data 'set-match-data) -(defalias 'chmod 'set-file-modes) -(defalias 'mkdir 'make-directory) -;; These are the XEmacs names: -(defalias 'point-at-eol 'line-end-position) -(defalias 'point-at-bol 'line-beginning-position) - -(defalias 'user-original-login-name 'user-login-name) - - -;;;; Hook manipulation functions. - -(defun add-hook (hook function &optional append local) - "Add to the value of HOOK the function FUNCTION. -FUNCTION is not added if already present. -FUNCTION is added (if necessary) at the beginning of the hook list -unless the optional argument APPEND is non-nil, in which case -FUNCTION is added at the end. - -The optional fourth argument, LOCAL, if non-nil, says to modify -the hook's buffer-local value rather than its global value. -This makes the hook buffer-local, and it makes t a member of the -buffer-local value. That acts as a flag to run the hook -functions of the global value as well as in the local value. - -HOOK should be a symbol, and FUNCTION may be any valid function. If -HOOK is void, it is first set to nil. If HOOK's value is a single -function, it is changed to a list of functions." - (or (boundp hook) (set hook nil)) - (or (default-boundp hook) (set-default hook nil)) - (if local (unless (local-variable-if-set-p hook) - (set (make-local-variable hook) (list t))) - ;; Detect the case where make-local-variable was used on a hook - ;; and do what we used to do. - (unless (and (consp (symbol-value hook)) (memq t (symbol-value hook))) - (setq local t))) - (let ((hook-value (if local (symbol-value hook) (default-value hook)))) - ;; If the hook value is a single function, turn it into a list. - (when (or (not (listp hook-value)) (functionp hook-value)) - (setq hook-value (list hook-value))) - ;; Do the actual addition if necessary - (unless (member function hook-value) - (when (stringp function) - (setq function (purecopy function))) - (setq hook-value - (if append - (append hook-value (list function)) - (cons function hook-value)))) - ;; Set the actual variable - (if local - (progn - ;; If HOOK isn't a permanent local, - ;; but FUNCTION wants to survive a change of modes, - ;; mark HOOK as partially permanent. - (and (symbolp function) - (get function 'permanent-local-hook) - (not (get hook 'permanent-local)) - (put hook 'permanent-local 'permanent-local-hook)) - (set hook hook-value)) - (set-default hook hook-value)))) - -(defun remove-hook (hook function &optional local) - "Remove from the value of HOOK the function FUNCTION. -HOOK should be a symbol, and FUNCTION may be any valid function. If -FUNCTION isn't the value of HOOK, or, if FUNCTION doesn't appear in the -list of hooks to run in HOOK, then nothing is done. See `add-hook'. 
- -The optional third argument, LOCAL, if non-nil, says to modify -the hook's buffer-local value rather than its default value." - (or (boundp hook) (set hook nil)) - (or (default-boundp hook) (set-default hook nil)) - ;; Do nothing if LOCAL is t but this hook has no local binding. - (unless (and local (not (local-variable-p hook))) - ;; Detect the case where make-local-variable was used on a hook - ;; and do what we used to do. - (when (and (local-variable-p hook) - (not (and (consp (symbol-value hook)) - (memq t (symbol-value hook))))) - (setq local t)) - (let ((hook-value (if local (symbol-value hook) (default-value hook)))) - ;; Remove the function, for both the list and the non-list cases. - (if (or (not (listp hook-value)) (eq (car hook-value) 'lambda)) - (if (equal hook-value function) (setq hook-value nil)) - (setq hook-value (delete function (copy-sequence hook-value)))) - ;; If the function is on the global hook, we need to shadow it locally - ;;(when (and local (member function (default-value hook)) - ;; (not (member (cons 'not function) hook-value))) - ;; (push (cons 'not function) hook-value)) - ;; Set the actual variable - (if (not local) - (set-default hook hook-value) - (if (equal hook-value '(t)) - (kill-local-variable hook) - (set hook hook-value)))))) - -(defmacro letrec (binders &rest body) - "Bind variables according to BINDERS then eval BODY. -The value of the last form in BODY is returned. -Each element of BINDERS is a list (SYMBOL VALUEFORM) which binds -SYMBOL to the value of VALUEFORM. -All symbols are bound before the VALUEFORMs are evalled." - ;; Only useful in lexical-binding mode. - ;; As a special-form, we could implement it more efficiently (and cleanly, - ;; making the vars actually unbound during evaluation of the binders). - (declare (debug let) (indent 1)) - `(let ,(mapcar #'car binders) - ,@(mapcar (lambda (binder) `(setq ,@binder)) binders) - ,@body)) - -(defmacro with-wrapper-hook (hook args &rest body) - "Run BODY, using wrapper functions from HOOK with additional ARGS. -HOOK is an abnormal hook. Each hook function in HOOK \"wraps\" -around the preceding ones, like a set of nested `around' advices. - -Each hook function should accept an argument list consisting of a -function FUN, followed by the additional arguments in ARGS. - -The first hook function in HOOK is passed a FUN that, if it is called -with arguments ARGS, performs BODY (i.e., the default operation). -The FUN passed to each successive hook function is defined based -on the preceding hook functions; if called with arguments ARGS, -it does what the `with-wrapper-hook' call would do if the -preceding hook functions were the only ones present in HOOK. - -Each hook function may call its FUN argument as many times as it wishes, -including never. In that case, such a hook function acts to replace -the default definition altogether, and any preceding hook functions. -Of course, a subsequent hook function may do the same thing. - -Each hook function definition is used to construct the FUN passed -to the next hook function, if any. The last (or \"outermost\") -FUN is then called once." - (declare (indent 2) (debug (form sexp body)) - (obsolete "use a -function variable modified by `add-function'." 
- "24.4")) - ;; We need those two gensyms because CL's lexical scoping is not available - ;; for function arguments :-( - (let ((funs (make-symbol "funs")) - (global (make-symbol "global")) - (argssym (make-symbol "args")) - (runrestofhook (make-symbol "runrestofhook"))) - ;; Since the hook is a wrapper, the loop has to be done via - ;; recursion: a given hook function will call its parameter in order to - ;; continue looping. - `(letrec ((,runrestofhook - (lambda (,funs ,global ,argssym) - ;; `funs' holds the functions left on the hook and `global' - ;; holds the functions left on the global part of the hook - ;; (in case the hook is local). - (if (consp ,funs) - (if (eq t (car ,funs)) - (funcall ,runrestofhook - (append ,global (cdr ,funs)) nil ,argssym) - (apply (car ,funs) - (apply-partially - (lambda (,funs ,global &rest ,argssym) - (funcall ,runrestofhook ,funs ,global ,argssym)) - (cdr ,funs) ,global) - ,argssym)) - ;; Once there are no more functions on the hook, run - ;; the original body. - (apply (lambda ,args ,@body) ,argssym))))) - (funcall ,runrestofhook ,hook - ;; The global part of the hook, if any. - ,(if (symbolp hook) - `(if (local-variable-p ',hook) - (default-value ',hook))) - (list ,@args))))) - -(defun add-to-list (list-var element &optional append compare-fn) - "Add ELEMENT to the value of LIST-VAR if it isn't there yet. -The test for presence of ELEMENT is done with `equal', or with -COMPARE-FN if that's non-nil. -If ELEMENT is added, it is added at the beginning of the list, -unless the optional argument APPEND is non-nil, in which case -ELEMENT is added at the end. - -The return value is the new value of LIST-VAR. - -This is handy to add some elements to configuration variables, -but please do not abuse it in Elisp code, where you are usually -better off using `push' or `cl-pushnew'. - -If you want to use `add-to-list' on a variable that is not -defined until a certain package is loaded, you should put the -call to `add-to-list' into a hook function that will be run only -after loading the package. `eval-after-load' provides one way to -do this. In some cases other hooks, such as major mode hooks, -can do the job." - (declare - (compiler-macro - (lambda (exp) - ;; FIXME: Something like this could be used for `set' as well. - (if (or (not (eq 'quote (car-safe list-var))) - (special-variable-p (cadr list-var)) - (not (macroexp-const-p append))) - exp - (let* ((sym (cadr list-var)) - (append (eval append)) - (msg (format "`add-to-list' can't use lexical var `%s'; use `push' or `cl-pushnew'" - sym)) - ;; Big ugly hack so we only output a warning during - ;; byte-compilation, and so we can use - ;; byte-compile-not-lexical-var-p to silence the warning - ;; when a defvar has been seen but not yet executed. - (warnfun (lambda () - ;; FIXME: We should also emit a warning for let-bound - ;; variables with dynamic binding. - (when (assq sym byte-compile--lexical-environment) - (byte-compile-log-warning msg t :error)))) - (code - (macroexp-let2 macroexp-copyable-p x element - `(if ,(if compare-fn - (progn - (require 'cl-lib) - `(cl-member ,x ,sym :test ,compare-fn)) - ;; For bootstrapping reasons, don't rely on - ;; cl--compiler-macro-member for the base case. 
- `(member ,x ,sym)) - ,sym - ,(if append - `(setq ,sym (append ,sym (list ,x))) - `(push ,x ,sym)))))) - (if (not (macroexp--compiling-p)) - code - `(progn - (macroexp--funcall-if-compiled ',warnfun) - ,code))))))) - (if (cond - ((null compare-fn) - (member element (symbol-value list-var))) - ((eq compare-fn 'eq) - (memq element (symbol-value list-var))) - ((eq compare-fn 'eql) - (memql element (symbol-value list-var))) - (t - (let ((lst (symbol-value list-var))) - (while (and lst - (not (funcall compare-fn element (car lst)))) - (setq lst (cdr lst))) - lst))) - (symbol-value list-var) - (set list-var - (if append - (append (symbol-value list-var) (list element)) - (cons element (symbol-value list-var)))))) - - -(defun add-to-ordered-list (list-var element &optional order) - "Add ELEMENT to the value of LIST-VAR if it isn't there yet. -The test for presence of ELEMENT is done with `eq'. - -The resulting list is reordered so that the elements are in the -order given by each element's numeric list order. Elements -without a numeric list order are placed at the end of the list. - -If the third optional argument ORDER is a number (integer or -float), set the element's list order to the given value. If -ORDER is nil or omitted, do not change the numeric order of -ELEMENT. If ORDER has any other value, remove the numeric order -of ELEMENT if it has one. - -The list order for each element is stored in LIST-VAR's -`list-order' property. - -The return value is the new value of LIST-VAR." - (let ((ordering (get list-var 'list-order))) - (unless ordering - (put list-var 'list-order - (setq ordering (make-hash-table :weakness 'key :test 'eq)))) - (when order - (puthash element (and (numberp order) order) ordering)) - (unless (memq element (symbol-value list-var)) - (set list-var (cons element (symbol-value list-var)))) - (set list-var (sort (symbol-value list-var) - (lambda (a b) - (let ((oa (gethash a ordering)) - (ob (gethash b ordering))) - (if (and oa ob) - (< oa ob) - oa))))))) - -(defun add-to-history (history-var newelt &optional maxelt keep-all) - "Add NEWELT to the history list stored in the variable HISTORY-VAR. -Return the new history list. -If MAXELT is non-nil, it specifies the maximum length of the history. -Otherwise, the maximum history length is the value of the `history-length' -property on symbol HISTORY-VAR, if set, or the value of the `history-length' -variable. -Remove duplicates of NEWELT if `history-delete-duplicates' is non-nil. -If optional fourth arg KEEP-ALL is non-nil, add NEWELT to history even -if it is empty or a duplicate." - (unless maxelt - (setq maxelt (or (get history-var 'history-length) - history-length))) - (let ((history (symbol-value history-var)) - tail) - (when (and (listp history) - (or keep-all - (not (stringp newelt)) - (> (length newelt) 0)) - (or keep-all - (not (equal (car history) newelt)))) - (if history-delete-duplicates - (setq history (delete newelt history))) - (setq history (cons newelt history)) - (when (integerp maxelt) - (if (= 0 maxelt) - (setq history nil) - (setq tail (nthcdr (1- maxelt) history)) - (when (consp tail) - (setcdr tail nil))))) - (set history-var history))) - - -;;;; Mode hooks. 
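-
-;; A brief illustrative sketch of how the hook and list helpers above are
-;; typically called; `my-text-setup' is a hypothetical function name and
-;; the chosen hook, path and history variable are arbitrary examples:
-;;   (defun my-text-setup () (setq-local show-trailing-whitespace t))
-;;   (add-hook 'text-mode-hook #'my-text-setup)        ; modify the global value
-;;   (add-hook 'text-mode-hook #'my-text-setup nil t)  ; modify the buffer-local value
-;;   (remove-hook 'text-mode-hook #'my-text-setup)
-;;   (add-to-list 'load-path "~/elisp" t)              ; append rather than prepend
-;;   (add-to-history 'extended-command-history "find-file")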
- -(defvar delay-mode-hooks nil - "If non-nil, `run-mode-hooks' should delay running the hooks.") -(defvar delayed-mode-hooks nil - "List of delayed mode hooks waiting to be run.") -(make-variable-buffer-local 'delayed-mode-hooks) -(put 'delay-mode-hooks 'permanent-local t) - -(defvar change-major-mode-after-body-hook nil - "Normal hook run in major mode functions, before the mode hooks.") - -(defvar after-change-major-mode-hook nil - "Normal hook run at the very end of major mode functions.") - -(defun run-mode-hooks (&rest hooks) - "Run mode hooks `delayed-mode-hooks' and HOOKS, or delay HOOKS. -If the variable `delay-mode-hooks' is non-nil, does not run any hooks, -just adds the HOOKS to the list `delayed-mode-hooks'. -Otherwise, runs hooks in the sequence: `change-major-mode-after-body-hook', -`delayed-mode-hooks' (in reverse order), HOOKS, and finally -`after-change-major-mode-hook'. Major mode functions should use -this instead of `run-hooks' when running their FOO-mode-hook." - (if delay-mode-hooks - ;; Delaying case. - (dolist (hook hooks) - (push hook delayed-mode-hooks)) - ;; Normal case, just run the hook as before plus any delayed hooks. - (setq hooks (nconc (nreverse delayed-mode-hooks) hooks)) - (setq delayed-mode-hooks nil) - (apply 'run-hooks (cons 'change-major-mode-after-body-hook hooks)) - (run-hooks 'after-change-major-mode-hook))) - -(defmacro delay-mode-hooks (&rest body) - "Execute BODY, but delay any `run-mode-hooks'. -These hooks will be executed by the first following call to -`run-mode-hooks' that occurs outside any `delayed-mode-hooks' form. -Only affects hooks run in the current buffer." - (declare (debug t) (indent 0)) - `(progn - (make-local-variable 'delay-mode-hooks) - (let ((delay-mode-hooks t)) - ,@body))) - -;; PUBLIC: find if the current mode derives from another. - -(defun derived-mode-p (&rest modes) - "Non-nil if the current major mode is derived from one of MODES. -Uses the `derived-mode-parent' property of the symbol to trace backwards." - (let ((parent major-mode)) - (while (and (not (memq parent modes)) - (setq parent (get parent 'derived-mode-parent)))) - parent)) - -;;;; Minor modes. - -;; If a minor mode is not defined with define-minor-mode, -;; add it here explicitly. -;; isearch-mode is deliberately excluded, since you should -;; not call it yourself. -(defvar minor-mode-list '(auto-save-mode auto-fill-mode abbrev-mode - overwrite-mode view-mode - hs-minor-mode) - "List of all minor mode functions.") - -(defun add-minor-mode (toggle name &optional keymap after toggle-fun) - "Register a new minor mode. - -This is an XEmacs-compatibility function. Use `define-minor-mode' instead. - -TOGGLE is a symbol which is the name of a buffer-local variable that -is toggled on or off to say whether the minor mode is active or not. - -NAME specifies what will appear in the mode line when the minor mode -is active. NAME should be either a string starting with a space, or a -symbol whose value is such a string. - -Optional KEYMAP is the keymap for the minor mode that will be added -to `minor-mode-map-alist'. - -Optional AFTER specifies that TOGGLE should be added after AFTER -in `minor-mode-alist'. - -Optional TOGGLE-FUN is an interactive function to toggle the mode. -It defaults to (and should by convention be) TOGGLE. - -If TOGGLE has a non-nil `:included' property, an entry for the mode is -included in the mode-line minor mode menu. -If TOGGLE has a `:menu-tag', that is used for the menu item's label." 
- (unless (memq toggle minor-mode-list) - (push toggle minor-mode-list)) - - (unless toggle-fun (setq toggle-fun toggle)) - (unless (eq toggle-fun toggle) - (put toggle :minor-mode-function toggle-fun)) - ;; Add the name to the minor-mode-alist. - (when name - (let ((existing (assq toggle minor-mode-alist))) - (if existing - (setcdr existing (list name)) - (let ((tail minor-mode-alist) found) - (while (and tail (not found)) - (if (eq after (caar tail)) - (setq found tail) - (setq tail (cdr tail)))) - (if found - (let ((rest (cdr found))) - (setcdr found nil) - (nconc found (list (list toggle name)) rest)) - (push (list toggle name) minor-mode-alist)))))) - ;; Add the toggle to the minor-modes menu if requested. - (when (get toggle :included) - (define-key mode-line-mode-menu - (vector toggle) - (list 'menu-item - (concat - (or (get toggle :menu-tag) - (if (stringp name) name (symbol-name toggle))) - (let ((mode-name (if (symbolp name) (symbol-value name)))) - (if (and (stringp mode-name) (string-match "[^ ]+" mode-name)) - (concat " (" (match-string 0 mode-name) ")")))) - toggle-fun - :button (cons :toggle toggle)))) - - ;; Add the map to the minor-mode-map-alist. - (when keymap - (let ((existing (assq toggle minor-mode-map-alist))) - (if existing - (setcdr existing keymap) - (let ((tail minor-mode-map-alist) found) - (while (and tail (not found)) - (if (eq after (caar tail)) - (setq found tail) - (setq tail (cdr tail)))) - (if found - (let ((rest (cdr found))) - (setcdr found nil) - (nconc found (list (cons toggle keymap)) rest)) - (push (cons toggle keymap) minor-mode-map-alist))))))) - -;;;; Load history - -(defsubst autoloadp (object) - "Non-nil if OBJECT is an autoload." - (eq 'autoload (car-safe object))) - -;; (defun autoload-type (object) -;; "Returns the type of OBJECT or `function' or `command' if the type is nil. -;; OBJECT should be an autoload object." -;; (when (autoloadp object) -;; (let ((type (nth 3 object))) -;; (cond ((null type) (if (nth 2 object) 'command 'function)) -;; ((eq 'keymap t) 'macro) -;; (type))))) - -;; (defalias 'autoload-file #'cadr -;; "Return the name of the file from which AUTOLOAD will be loaded. -;; \n\(fn AUTOLOAD)") - -(defun symbol-file (symbol &optional type) - "Return the name of the file that defined SYMBOL. -The value is normally an absolute file name. It can also be nil, -if the definition is not associated with any file. If SYMBOL -specifies an autoloaded function, the value can be a relative -file name without extension. - -If TYPE is nil, then any kind of definition is acceptable. If -TYPE is `defun', `defvar', or `defface', that specifies function -definition, variable definition, or face definition only." - (if (and (or (null type) (eq type 'defun)) - (symbolp symbol) - (autoloadp (symbol-function symbol))) - (nth 1 (symbol-function symbol)) - (let ((files load-history) - file) - (while files - (if (if type - (if (eq type 'defvar) - ;; Variables are present just as their names. - (member symbol (cdr (car files))) - ;; Other types are represented as (TYPE . NAME). - (member (cons type symbol) (cdr (car files)))) - ;; We accept all types, so look for variable def - ;; and then for any other kind. - (or (member symbol (cdr (car files))) - (rassq symbol (cdr (car files))))) - (setq file (car (car files)) files nil)) - (setq files (cdr files))) - file))) - -(defun locate-library (library &optional nosuffix path interactive-call) - "Show the precise file name of Emacs library LIBRARY. 
-LIBRARY should be a relative file name of the library, a string. -It can omit the suffix (a.k.a. file-name extension) if NOSUFFIX is -nil (which is the default, see below). -This command searches the directories in `load-path' like `\\[load-library]' -to find the file that `\\[load-library] RET LIBRARY RET' would load. -Optional second arg NOSUFFIX non-nil means don't add suffixes `load-suffixes' -to the specified name LIBRARY. - -If the optional third arg PATH is specified, that list of directories -is used instead of `load-path'. - -When called from a program, the file name is normally returned as a -string. When run interactively, the argument INTERACTIVE-CALL is t, -and the file name is displayed in the echo area." - (interactive (list (completing-read "Locate library: " - (apply-partially - 'locate-file-completion-table - load-path (get-load-suffixes))) - nil nil - t)) - (let ((file (locate-file library - (or path load-path) - (append (unless nosuffix (get-load-suffixes)) - load-file-rep-suffixes)))) - (if interactive-call - (if file - (message "Library is file %s" (abbreviate-file-name file)) - (message "No library %s in search path" library))) - file)) - - -;;;; Process stuff. - -(defun process-lines (program &rest args) - "Execute PROGRAM with ARGS, returning its output as a list of lines. -Signal an error if the program returns with a non-zero exit status." - (with-temp-buffer - (let ((status (apply 'call-process program nil (current-buffer) nil args))) - (unless (eq status 0) - (error "%s exited with status %s" program status)) - (goto-char (point-min)) - (let (lines) - (while (not (eobp)) - (setq lines (cons (buffer-substring-no-properties - (line-beginning-position) - (line-end-position)) - lines)) - (forward-line 1)) - (nreverse lines))))) - -(defun process-live-p (process) - "Returns non-nil if PROCESS is alive. -A process is considered alive if its status is `run', `open', -`listen', `connect' or `stop'. Value is nil if PROCESS is not a -process." - (and (processp process) - (memq (process-status process) - '(run open listen connect stop)))) - -;; compatibility - -(make-obsolete - 'process-kill-without-query - "use `process-query-on-exit-flag' or `set-process-query-on-exit-flag'." - "22.1") -(defun process-kill-without-query (process &optional _flag) - "Say no query needed if PROCESS is running when Emacs is exited. -Optional second argument if non-nil says to require a query. -Value is t if a query was formerly required." - (let ((old (process-query-on-exit-flag process))) - (set-process-query-on-exit-flag process nil) - old)) - -(defun process-kill-buffer-query-function () - "Ask before killing a buffer that has a running process." - (let ((process (get-buffer-process (current-buffer)))) - (or (not process) - (not (memq (process-status process) '(run stop open listen))) - (not (process-query-on-exit-flag process)) - (yes-or-no-p - (format "Buffer %S has a running process; kill it? " - (buffer-name (current-buffer))))))) - -(add-hook 'kill-buffer-query-functions 'process-kill-buffer-query-function) - -;; process plist management - -(defun process-get (process propname) - "Return the value of PROCESS' PROPNAME property. -This is the last value stored with `(process-put PROCESS PROPNAME VALUE)'." - (plist-get (process-plist process) propname)) - -(defun process-put (process propname value) - "Change PROCESS' PROPNAME property to VALUE. -It can be retrieved with `(process-get PROCESS PROPNAME)'." 
- (set-process-plist process - (plist-put (process-plist process) propname value))) - - -;;;; Input and display facilities. - -(defconst read-key-empty-map (make-sparse-keymap)) - -(defvar read-key-delay 0.01) ;Fast enough for 100Hz repeat rate, hopefully. - -(defun read-key (&optional prompt) - "Read a key from the keyboard. -Contrary to `read-event' this will not return a raw event but instead will -obey the input decoding and translations usually done by `read-key-sequence'. -So escape sequences and keyboard encoding are taken into account. -When there's an ambiguity because the key looks like the prefix of -some sort of escape sequence, the ambiguity is resolved via `read-key-delay'." - ;; This overriding-terminal-local-map binding also happens to - ;; disable quail's input methods, so although read-key-sequence - ;; always inherits the input method, in practice read-key does not - ;; inherit the input method (at least not if it's based on quail). - (let ((overriding-terminal-local-map nil) - (overriding-local-map read-key-empty-map) - (echo-keystrokes 0) - (old-global-map (current-global-map)) - (timer (run-with-idle-timer - ;; Wait long enough that Emacs has the time to receive and - ;; process all the raw events associated with the single-key. - ;; But don't wait too long, or the user may find the delay - ;; annoying (or keep hitting more keys which may then get - ;; lost or misinterpreted). - ;; This is only relevant for keys which Emacs perceives as - ;; "prefixes", such as C-x (because of the C-x 8 map in - ;; key-translate-table and the C-x @ map in function-key-map) - ;; or ESC (because of terminal escape sequences in - ;; input-decode-map). - read-key-delay t - (lambda () - (let ((keys (this-command-keys-vector))) - (unless (zerop (length keys)) - ;; `keys' is non-empty, so the user has hit at least - ;; one key; there's no point waiting any longer, even - ;; though read-key-sequence thinks we should wait - ;; for more input to decide how to interpret the - ;; current input. - (throw 'read-key keys))))))) - (unwind-protect - (progn - (use-global-map - (let ((map (make-sparse-keymap))) - ;; Don't hide the menu-bar and tool-bar entries. - (define-key map [menu-bar] (lookup-key global-map [menu-bar])) - (define-key map [tool-bar] - ;; This hack avoids evaluating the :filter (Bug#9922). - (or (cdr (assq 'tool-bar global-map)) - (lookup-key global-map [tool-bar]))) - map)) - (let* ((keys - (catch 'read-key (read-key-sequence-vector prompt nil t))) - (key (aref keys 0))) - (if (and (> (length keys) 1) - (memq key '(mode-line header-line - left-fringe right-fringe))) - (aref keys 1) - key))) - (cancel-timer timer) - (use-global-map old-global-map)))) - -(defvar read-passwd-map - ;; BEWARE: `defconst' would purecopy it, breaking the sharing with - ;; minibuffer-local-map along the way! - (let ((map (make-sparse-keymap))) - (set-keymap-parent map minibuffer-local-map) - (define-key map "\C-u" #'delete-minibuffer-contents) ;bug#12570 - map) - "Keymap used while reading passwords.") - -(defun read-passwd (prompt &optional confirm default) - "Read a password, prompting with PROMPT, and return it. -If optional CONFIRM is non-nil, read the password twice to make sure. -Optional DEFAULT is a default password to use instead of empty input. - -This function echoes `.' for each character that the user types. -You could let-bind `read-hide-char' to another hiding character, though. - -Once the caller uses the password, it can erase the password -by doing (clear-string STRING)." 
- (if confirm - (let (success) - (while (not success) - (let ((first (read-passwd prompt nil default)) - (second (read-passwd "Confirm password: " nil default))) - (if (equal first second) - (progn - (and (arrayp second) (clear-string second)) - (setq success first)) - (and (arrayp first) (clear-string first)) - (and (arrayp second) (clear-string second)) - (message "Password not repeated accurately; please start over") - (sit-for 1)))) - success) - (let ((hide-chars-fun - (lambda (beg end _len) - (clear-this-command-keys) - (setq beg (min end (max (minibuffer-prompt-end) - beg))) - (dotimes (i (- end beg)) - (put-text-property (+ i beg) (+ 1 i beg) - 'display (string (or read-hide-char ?.)))))) - minibuf) - (minibuffer-with-setup-hook - (lambda () - (setq minibuf (current-buffer)) - ;; Turn off electricity. - (setq-local post-self-insert-hook nil) - (setq-local buffer-undo-list t) - (setq-local select-active-regions nil) - (use-local-map read-passwd-map) - (setq-local inhibit-modification-hooks nil) ;bug#15501. - (setq-local show-paren-mode nil) ;bug#16091. - (add-hook 'after-change-functions hide-chars-fun nil 'local)) - (unwind-protect - (let ((enable-recursive-minibuffers t) - (read-hide-char (or read-hide-char ?.))) - (read-string prompt nil t default)) ; t = "no history" - (when (buffer-live-p minibuf) - (with-current-buffer minibuf - ;; Not sure why but it seems that there might be cases where the - ;; minibuffer is not always properly reset later on, so undo - ;; whatever we've done here (bug#11392). - (remove-hook 'after-change-functions hide-chars-fun 'local) - (kill-local-variable 'post-self-insert-hook) - ;; And of course, don't keep the sensitive data around. - (erase-buffer)))))))) - -(defun read-number (prompt &optional default) - "Read a numeric value in the minibuffer, prompting with PROMPT. -DEFAULT specifies a default value to return if the user just types RET. -The value of DEFAULT is inserted into PROMPT. -This function is used by the `interactive' code letter `n'." - (let ((n nil) - (default1 (if (consp default) (car default) default))) - (when default1 - (setq prompt - (if (string-match "\\(\\):[ \t]*\\'" prompt) - (replace-match (format " (default %s)" default1) t t prompt 1) - (replace-regexp-in-string "[ \t]*\\'" - (format " (default %s) " default1) - prompt t t)))) - (while - (progn - (let ((str (read-from-minibuffer - prompt nil nil nil nil - (when default - (if (consp default) - (mapcar 'number-to-string (delq nil default)) - (number-to-string default)))))) - (condition-case nil - (setq n (cond - ((zerop (length str)) default1) - ((stringp str) (read str)))) - (error nil))) - (unless (numberp n) - (message "Please enter a number.") - (sit-for 1) - t))) - n)) - -(defun read-char-choice (prompt chars &optional inhibit-keyboard-quit) - "Read and return one of CHARS, prompting for PROMPT. -Any input that is not one of CHARS is ignored. - -If optional argument INHIBIT-KEYBOARD-QUIT is non-nil, ignore -keyboard-quit events while waiting for a valid input." 
- (unless (consp chars) - (error "Called `read-char-choice' without valid char choices")) - (let (char done show-help (helpbuf " *Char Help*")) - (let ((cursor-in-echo-area t) - (executing-kbd-macro executing-kbd-macro) - (esc-flag nil)) - (save-window-excursion ; in case we call help-form-show - (while (not done) - (unless (get-text-property 0 'face prompt) - (setq prompt (propertize prompt 'face 'minibuffer-prompt))) - (setq char (let ((inhibit-quit inhibit-keyboard-quit)) - (read-key prompt))) - (and show-help (buffer-live-p (get-buffer helpbuf)) - (kill-buffer helpbuf)) - (cond - ((not (numberp char))) - ;; If caller has set help-form, that's enough. - ;; They don't explicitly have to add help-char to chars. - ((and help-form - (eq char help-char) - (setq show-help t) - (help-form-show))) - ((memq char chars) - (setq done t)) - ((and executing-kbd-macro (= char -1)) - ;; read-event returns -1 if we are in a kbd macro and - ;; there are no more events in the macro. Attempt to - ;; get an event interactively. - (setq executing-kbd-macro nil)) - ((not inhibit-keyboard-quit) - (cond - ((and (null esc-flag) (eq char ?\e)) - (setq esc-flag t)) - ((memq char '(?\C-g ?\e)) - (keyboard-quit)))))))) - ;; Display the question with the answer. But without cursor-in-echo-area. - (message "%s%s" prompt (char-to-string char)) - char)) - -(defun sit-for (seconds &optional nodisp obsolete) - "Redisplay, then wait for SECONDS seconds. Stop when input is available. -SECONDS may be a floating-point value. -\(On operating systems that do not support waiting for fractions of a -second, floating-point values are rounded down to the nearest integer.) - -If optional arg NODISP is t, don't redisplay, just wait for input. -Redisplay does not happen if input is available before it starts. - -Value is t if waited the full time with no input arriving, and nil otherwise. - -An obsolete, but still supported form is -\(sit-for SECONDS &optional MILLISECONDS NODISP) -where the optional arg MILLISECONDS specifies an additional wait period, -in milliseconds; this was useful when Emacs was built without -floating point support." - (declare (advertised-calling-convention (seconds &optional nodisp) "22.1")) - ;; This used to be implemented in C until the following discussion: - ;; http://lists.gnu.org/archive/html/emacs-devel/2006-07/msg00401.html - ;; Then it was moved here using an implementation based on an idle timer, - ;; which was then replaced by the use of read-event. - (if (numberp nodisp) - (setq seconds (+ seconds (* 1e-3 nodisp)) - nodisp obsolete) - (if obsolete (setq nodisp obsolete))) - (cond - (noninteractive - (sleep-for seconds) - t) - ((input-pending-p t) - nil) - ((<= seconds 0) - (or nodisp (redisplay))) - (t - (or nodisp (redisplay)) - ;; FIXME: we should not read-event here at all, because it's much too - ;; difficult to reliably "undo" a read-event by pushing it onto - ;; unread-command-events. - ;; For bug#14782, we need read-event to do the keyboard-coding-system - ;; decoding (hence non-nil as second arg under POSIX ttys). - ;; For bug#15614, we need read-event not to inherit-input-method. - ;; So we temporarily suspend input-method-function. - (let ((read (let ((input-method-function nil)) - (read-event nil t seconds)))) - (or (null read) - (progn - ;; https://lists.gnu.org/archive/html/emacs-devel/2006-10/msg00394.html - ;; We want `read' appear in the next command's this-command-event - ;; but not in the current one. 
- ;; By pushing (cons t read), we indicate that `read' has not - ;; yet been recorded in this-command-keys, so it will be recorded - ;; next time it's read. - ;; And indeed the `seconds' argument to read-event correctly - ;; prevented recording this event in the current command's - ;; this-command-keys. - (push (cons t read) unread-command-events) - nil)))))) - -;; Behind display-popup-menus-p test. -(declare-function x-popup-dialog "menu.c" (position contents &optional header)) - -(defun y-or-n-p (prompt) - "Ask user a \"y or n\" question. Return t if answer is \"y\". -PROMPT is the string to display to ask the question. It should -end in a space; `y-or-n-p' adds \"(y or n) \" to it. - -No confirmation of the answer is requested; a single character is -enough. SPC also means yes, and DEL means no. - -To be precise, this function translates user input into responses -by consulting the bindings in `query-replace-map'; see the -documentation of that variable for more information. In this -case, the useful bindings are `act', `skip', `recenter', -`scroll-up', `scroll-down', and `quit'. -An `act' response means yes, and a `skip' response means no. -A `quit' response means to invoke `keyboard-quit'. -If the user enters `recenter', `scroll-up', or `scroll-down' -responses, perform the requested window recentering or scrolling -and ask again. - -Under a windowing system a dialog box will be used if `last-nonmenu-event' -is nil and `use-dialog-box' is non-nil." - ;; ¡Beware! when I tried to edebug this code, Emacs got into a weird state - ;; where all the keys were unbound (i.e. it somehow got triggered - ;; within read-key, apparently). I had to kill it. - (let ((answer 'recenter) - (padded (lambda (prompt &optional dialog) - (let ((l (length prompt))) - (concat prompt - (if (or (zerop l) (eq ?\s (aref prompt (1- l)))) - "" " ") - (if dialog "" "(y or n) ")))))) - (cond - (noninteractive - (setq prompt (funcall padded prompt)) - (let ((temp-prompt prompt)) - (while (not (memq answer '(act skip))) - (let ((str (read-string temp-prompt))) - (cond ((member str '("y" "Y")) (setq answer 'act)) - ((member str '("n" "N")) (setq answer 'skip)) - (t (setq temp-prompt (concat "Please answer y or n. " - prompt)))))))) - ((and (display-popup-menus-p) - (listp last-nonmenu-event) - use-dialog-box) - (setq prompt (funcall padded prompt t) - answer (x-popup-dialog t `(,prompt ("Yes" . act) ("No" . skip))))) - (t - (setq prompt (funcall padded prompt)) - (while - (let* ((scroll-actions '(recenter scroll-up scroll-down - scroll-other-window scroll-other-window-down)) - (key - (let ((cursor-in-echo-area t)) - (when minibuffer-auto-raise - (raise-frame (window-frame (minibuffer-window)))) - (read-key (propertize (if (memq answer scroll-actions) - prompt - (concat "Please answer y or n. 
" - prompt)) - 'face 'minibuffer-prompt))))) - (setq answer (lookup-key query-replace-map (vector key) t)) - (cond - ((memq answer '(skip act)) nil) - ((eq answer 'recenter) - (recenter) t) - ((eq answer 'scroll-up) - (ignore-errors (scroll-up-command)) t) - ((eq answer 'scroll-down) - (ignore-errors (scroll-down-command)) t) - ((eq answer 'scroll-other-window) - (ignore-errors (scroll-other-window)) t) - ((eq answer 'scroll-other-window-down) - (ignore-errors (scroll-other-window-down)) t) - ((or (memq answer '(exit-prefix quit)) (eq key ?\e)) - (signal 'quit nil) t) - (t t))) - (ding) - (discard-input)))) - (let ((ret (eq answer 'act))) - (unless noninteractive - (message "%s%c" prompt (if ret ?y ?n))) - ret))) - - -;;; Atomic change groups. - -(defmacro atomic-change-group (&rest body) - "Perform BODY as an atomic change group. -This means that if BODY exits abnormally, -all of its changes to the current buffer are undone. -This works regardless of whether undo is enabled in the buffer. - -This mechanism is transparent to ordinary use of undo; -if undo is enabled in the buffer and BODY succeeds, the -user can undo the change normally." - (declare (indent 0) (debug t)) - (let ((handle (make-symbol "--change-group-handle--")) - (success (make-symbol "--change-group-success--"))) - `(let ((,handle (prepare-change-group)) - ;; Don't truncate any undo data in the middle of this. - (undo-outer-limit nil) - (undo-limit most-positive-fixnum) - (undo-strong-limit most-positive-fixnum) - (,success nil)) - (unwind-protect - (progn - ;; This is inside the unwind-protect because - ;; it enables undo if that was disabled; we need - ;; to make sure that it gets disabled again. - (activate-change-group ,handle) - ,@body - (setq ,success t)) - ;; Either of these functions will disable undo - ;; if it was disabled before. - (if ,success - (accept-change-group ,handle) - (cancel-change-group ,handle)))))) - -(defun prepare-change-group (&optional buffer) - "Return a handle for the current buffer's state, for a change group. -If you specify BUFFER, make a handle for BUFFER's state instead. - -Pass the handle to `activate-change-group' afterward to initiate -the actual changes of the change group. - -To finish the change group, call either `accept-change-group' or -`cancel-change-group' passing the same handle as argument. Call -`accept-change-group' to accept the changes in the group as final; -call `cancel-change-group' to undo them all. You should use -`unwind-protect' to make sure the group is always finished. The call -to `activate-change-group' should be inside the `unwind-protect'. -Once you finish the group, don't use the handle again--don't try to -finish the same group twice. For a simple example of correct use, see -the source code of `atomic-change-group'. - -The handle records only the specified buffer. To make a multibuffer -change group, call this function once for each buffer you want to -cover, then use `nconc' to combine the returned values, like this: - - (nconc (prepare-change-group buffer-1) - (prepare-change-group buffer-2)) - -You can then activate that multibuffer change group with a single -call to `activate-change-group' and finish it with a single call -to `accept-change-group' or `cancel-change-group'." - - (if buffer - (list (cons buffer (with-current-buffer buffer buffer-undo-list))) - (list (cons (current-buffer) buffer-undo-list)))) - -(defun activate-change-group (handle) - "Activate a change group made with `prepare-change-group' (which see)." 
- (dolist (elt handle) - (with-current-buffer (car elt) - (if (eq buffer-undo-list t) - (setq buffer-undo-list nil))))) - -(defun accept-change-group (handle) - "Finish a change group made with `prepare-change-group' (which see). -This finishes the change group by accepting its changes as final." - (dolist (elt handle) - (with-current-buffer (car elt) - (if (eq (cdr elt) t) - (setq buffer-undo-list t))))) - -(defun cancel-change-group (handle) - "Finish a change group made with `prepare-change-group' (which see). -This finishes the change group by reverting all of its changes." - (dolist (elt handle) - (with-current-buffer (car elt) - (setq elt (cdr elt)) - (save-restriction - ;; Widen buffer temporarily so if the buffer was narrowed within - ;; the body of `atomic-change-group' all changes can be undone. - (widen) - (let ((old-car - (if (consp elt) (car elt))) - (old-cdr - (if (consp elt) (cdr elt)))) - ;; Temporarily truncate the undo log at ELT. - (when (consp elt) - (setcar elt nil) (setcdr elt nil)) - (unless (eq last-command 'undo) (undo-start)) - ;; Make sure there's no confusion. - (when (and (consp elt) (not (eq elt (last pending-undo-list)))) - (error "Undoing to some unrelated state")) - ;; Undo it all. - (save-excursion - (while (listp pending-undo-list) (undo-more 1))) - ;; Reset the modified cons cell ELT to its original content. - (when (consp elt) - (setcar elt old-car) - (setcdr elt old-cdr)) - ;; Revert the undo info to what it was when we grabbed the state. - (setq buffer-undo-list elt)))))) - -;;;; Display-related functions. - -;; For compatibility. -(define-obsolete-function-alias 'redraw-modeline - 'force-mode-line-update "24.3") - -(defun momentary-string-display (string pos &optional exit-char message) - "Momentarily display STRING in the buffer at POS. -Display remains until next event is input. -If POS is a marker, only its position is used; its buffer is ignored. -Optional third arg EXIT-CHAR can be a character, event or event -description list. EXIT-CHAR defaults to SPC. If the input is -EXIT-CHAR it is swallowed; otherwise it is then available as -input (as a command if nothing else). -Display MESSAGE (optional fourth arg) in the echo area. -If MESSAGE is nil, instructions to type EXIT-CHAR are displayed there." - (or exit-char (setq exit-char ?\s)) - (let ((ol (make-overlay pos pos)) - (str (copy-sequence string))) - (unwind-protect - (progn - (save-excursion - (overlay-put ol 'after-string str) - (goto-char pos) - ;; To avoid trouble with out-of-bounds position - (setq pos (point)) - ;; If the string end is off screen, recenter now. - (if (<= (window-end nil t) pos) - (recenter (/ (window-height) 2)))) - (message (or message "Type %s to continue editing.") - (single-key-description exit-char)) - (let ((event (read-key))) - ;; `exit-char' can be an event, or an event description list. - (or (eq event exit-char) - (eq event (event-convert-list exit-char)) - (setq unread-command-events - (append (this-single-command-raw-keys)))))) - (delete-overlay ol)))) - - -;;;; Overlay operations - -(defun copy-overlay (o) - "Return a copy of overlay O." - (let ((o1 (if (overlay-buffer o) - (make-overlay (overlay-start o) (overlay-end o) - ;; FIXME: there's no easy way to find the - ;; insertion-type of the two markers. 
- (overlay-buffer o)) - (let ((o1 (make-overlay (point-min) (point-min)))) - (delete-overlay o1) - o1))) - (props (overlay-properties o))) - (while props - (overlay-put o1 (pop props) (pop props))) - o1)) - -(defun remove-overlays (&optional beg end name val) - "Clear BEG and END of overlays whose property NAME has value VAL. -Overlays might be moved and/or split. -BEG and END default respectively to the beginning and end of buffer." - ;; This speeds up the loops over overlays. - (unless beg (setq beg (point-min))) - (unless end (setq end (point-max))) - (overlay-recenter end) - (if (< end beg) - (setq beg (prog1 end (setq end beg)))) - (save-excursion - (dolist (o (overlays-in beg end)) - (when (eq (overlay-get o name) val) - ;; Either push this overlay outside beg...end - ;; or split it to exclude beg...end - ;; or delete it entirely (if it is contained in beg...end). - (if (< (overlay-start o) beg) - (if (> (overlay-end o) end) - (progn - (move-overlay (copy-overlay o) - (overlay-start o) beg) - (move-overlay o end (overlay-end o))) - (move-overlay o (overlay-start o) beg)) - (if (> (overlay-end o) end) - (move-overlay o end (overlay-end o)) - (delete-overlay o))))))) - -;;;; Miscellanea. - -(defvar suspend-hook nil - "Normal hook run by `suspend-emacs', before suspending.") - -(defvar suspend-resume-hook nil - "Normal hook run by `suspend-emacs', after Emacs is continued.") - -(defvar temp-buffer-show-hook nil - "Normal hook run by `with-output-to-temp-buffer' after displaying the buffer. -When the hook runs, the temporary buffer is current, and the window it -was displayed in is selected.") - -(defvar temp-buffer-setup-hook nil - "Normal hook run by `with-output-to-temp-buffer' at the start. -When the hook runs, the temporary buffer is current. -This hook is normally set up with a function to put the buffer in Help -mode.") - -(defconst user-emacs-directory - (if (eq system-type 'ms-dos) - ;; MS-DOS cannot have initial dot. - "~/_emacs.d/" - "~/.emacs.d/") - "Directory beneath which additional per-user Emacs-specific files are placed. -Various programs in Emacs store information in this directory. -Note that this should end with a directory separator. -See also `locate-user-emacs-file'.") - -;;;; Misc. useful functions. - -(defsubst buffer-narrowed-p () - "Return non-nil if the current buffer is narrowed." - (/= (- (point-max) (point-min)) (buffer-size))) - -(defun find-tag-default-bounds () - "Determine the boundaries of the default tag, based on text at point. -Return a cons cell with the beginning and end of the found tag. -If there is no plausible default, return nil." - (let (from to bound) - (when (or (progn - ;; Look at text around `point'. - (save-excursion - (skip-syntax-backward "w_") (setq from (point))) - (save-excursion - (skip-syntax-forward "w_") (setq to (point))) - (> to from)) - ;; Look between `line-beginning-position' and `point'. - (save-excursion - (and (setq bound (line-beginning-position)) - (skip-syntax-backward "^w_" bound) - (> (setq to (point)) bound) - (skip-syntax-backward "w_") - (setq from (point)))) - ;; Look between `point' and `line-end-position'. - (save-excursion - (and (setq bound (line-end-position)) - (skip-syntax-forward "^w_" bound) - (< (setq from (point)) bound) - (skip-syntax-forward "w_") - (setq to (point))))) - (cons from to)))) - -(defun find-tag-default () - "Determine default tag to search for, based on text at point. -If there is no plausible default, return nil." 
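;; A minimal sketch of `remove-overlays'; the `demo-highlight' property name
;; is hypothetical.
(remove-overlays (point-min) (point-max) 'demo-highlight t)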
- (let ((bounds (find-tag-default-bounds))) - (when bounds - (buffer-substring-no-properties (car bounds) (cdr bounds))))) - -(defun find-tag-default-as-regexp () - "Return regexp that matches the default tag at point. -If there is no tag at point, return nil. - -When in a major mode that does not provide its own -`find-tag-default-function', return a regexp that matches the -symbol at point exactly." - (let ((tag (funcall (or find-tag-default-function - (get major-mode 'find-tag-default-function) - 'find-tag-default)))) - (if tag (regexp-quote tag)))) - -(defun find-tag-default-as-symbol-regexp () - "Return regexp that matches the default tag at point as symbol. -If there is no tag at point, return nil. - -When in a major mode that does not provide its own -`find-tag-default-function', return a regexp that matches the -symbol at point exactly." - (let ((tag-regexp (find-tag-default-as-regexp))) - (if (and tag-regexp - (eq (or find-tag-default-function - (get major-mode 'find-tag-default-function) - 'find-tag-default) - 'find-tag-default)) - (format "\\_<%s\\_>" tag-regexp) - tag-regexp))) - -(defun play-sound (sound) - "SOUND is a list of the form `(sound KEYWORD VALUE...)'. -The following keywords are recognized: - - :file FILE - read sound data from FILE. If FILE isn't an -absolute file name, it is searched in `data-directory'. - - :data DATA - read sound data from string DATA. - -Exactly one of :file or :data must be present. - - :volume VOL - set volume to VOL. VOL must an integer in the -range 0..100 or a float in the range 0..1.0. If not specified, -don't change the volume setting of the sound device. - - :device DEVICE - play sound on DEVICE. If not specified, -a system-dependent default device name is used. - -Note: :data and :device are currently not supported on Windows." - (if (fboundp 'play-sound-internal) - (play-sound-internal sound) - (error "This Emacs binary lacks sound support"))) - -(declare-function w32-shell-dos-semantics "w32-fns" nil) - -(defun shell-quote-argument (argument) - "Quote ARGUMENT for passing as argument to an inferior shell." - (cond - ((eq system-type 'ms-dos) - ;; Quote using double quotes, but escape any existing quotes in - ;; the argument with backslashes. - (let ((result "") - (start 0) - end) - (if (or (null (string-match "[^\"]" argument)) - (< (match-end 0) (length argument))) - (while (string-match "[\"]" argument start) - (setq end (match-beginning 0) - result (concat result (substring argument start end) - "\\" (substring argument end (1+ end))) - start (1+ end)))) - (concat "\"" result (substring argument start) "\""))) - - ((and (eq system-type 'windows-nt) (w32-shell-dos-semantics)) - - ;; First, quote argument so that CommandLineToArgvW will - ;; understand it. See - ;; http://msdn.microsoft.com/en-us/library/17w5ykft%28v=vs.85%29.aspx - ;; After we perform that level of quoting, escape shell - ;; metacharacters so that cmd won't mangle our argument. If the - ;; argument contains no double quote characters, we can just - ;; surround it with double quotes. Otherwise, we need to prefix - ;; each shell metacharacter with a caret. 
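;; A minimal sketch of `shell-quote-argument'; the file name containing a
;; space is made up for illustration.
(shell-command (concat "wc -l " (shell-quote-argument "report 2019.txt")))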
- - (setq argument - ;; escape backslashes at end of string - (replace-regexp-in-string - "\\(\\\\*\\)$" - "\\1\\1" - ;; escape backslashes and quotes in string body - (replace-regexp-in-string - "\\(\\\\*\\)\"" - "\\1\\1\\\\\"" - argument))) - - (if (string-match "[%!\"]" argument) - (concat - "^\"" - (replace-regexp-in-string - "\\([%!()\"<>&|^]\\)" - "^\\1" - argument) - "^\"") - (concat "\"" argument "\""))) - - (t - (if (equal argument "") - "''" - ;; Quote everything except POSIX filename characters. - ;; This should be safe enough even for really weird shells. - (replace-regexp-in-string - "\n" "'\n'" - (replace-regexp-in-string "[^-0-9a-zA-Z_./\n]" "\\\\\\&" argument)))) - )) - -(defun string-or-null-p (object) - "Return t if OBJECT is a string or nil. -Otherwise, return nil." - (or (stringp object) (null object))) - -(defun booleanp (object) - "Return t if OBJECT is one of the two canonical boolean values: t or nil. -Otherwise, return nil." - (and (memq object '(nil t)) t)) - -(defun special-form-p (object) - "Non-nil if and only if OBJECT is a special form." - (if (and (symbolp object) (fboundp object)) - (setq object (indirect-function object t))) - (and (subrp object) (eq (cdr (subr-arity object)) 'unevalled))) - -(defun macrop (object) - "Non-nil if and only if OBJECT is a macro." - (let ((def (indirect-function object t))) - (when (consp def) - (or (eq 'macro (car def)) - (and (autoloadp def) (memq (nth 4 def) '(macro t))))))) - -(defun field-at-pos (pos) - "Return the field at position POS, taking stickiness etc into account." - (let ((raw-field (get-char-property (field-beginning pos) 'field))) - (if (eq raw-field 'boundary) - (get-char-property (1- (field-end pos)) 'field) - raw-field))) - -(defun sha1 (object &optional start end binary) - "Return the SHA1 (Secure Hash Algorithm) of an OBJECT. -OBJECT is either a string or a buffer. Optional arguments START and -END are character positions specifying which portion of OBJECT for -computing the hash. If BINARY is non-nil, return a string in binary -form." - (secure-hash 'sha1 object start end binary)) - -(defun function-get (f prop &optional autoload) - "Return the value of property PROP of function F. -If AUTOLOAD is non-nil and F is autoloaded, try to autoload it -in the hope that it will set PROP. If AUTOLOAD is `macro', only do it -if it's an autoloaded macro." - (let ((val nil)) - (while (and (symbolp f) - (null (setq val (get f prop))) - (fboundp f)) - (let ((fundef (symbol-function f))) - (if (and autoload (autoloadp fundef) - (not (equal fundef - (autoload-do-load fundef f - (if (eq autoload 'macro) - 'macro))))) - nil ;Re-try `get' on the same `f'. - (setq f fundef)))) - val)) - -;;;; Support for yanking and text properties. -;; Why here in subr.el rather than in simple.el? --Stef - -(defvar yank-handled-properties) -(defvar yank-excluded-properties) - -(defun remove-yank-excluded-properties (start end) - "Process text properties between START and END, inserted for a `yank'. -Perform the handling specified by `yank-handled-properties', then -remove properties specified by `yank-excluded-properties'." 
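;; A minimal sketch showing that `sha1' is a thin wrapper around
;; `secure-hash'; both forms return the same 40-character hex digest.
(sha1 "hello")
(secure-hash 'sha1 "hello")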
- (let ((inhibit-read-only t)) - (dolist (handler yank-handled-properties) - (let ((prop (car handler)) - (fun (cdr handler)) - (run-start start)) - (while (< run-start end) - (let ((value (get-text-property run-start prop)) - (run-end (next-single-property-change - run-start prop nil end))) - (funcall fun value run-start run-end) - (setq run-start run-end))))) - (if (eq yank-excluded-properties t) - (set-text-properties start end nil) - (remove-list-of-text-properties start end yank-excluded-properties)))) - -(defvar yank-undo-function) - -(defun insert-for-yank (string) - "Call `insert-for-yank-1' repetitively for each `yank-handler' segment. - -See `insert-for-yank-1' for more details." - (let (to) - (while (setq to (next-single-property-change 0 'yank-handler string)) - (insert-for-yank-1 (substring string 0 to)) - (setq string (substring string to)))) - (insert-for-yank-1 string)) - -(defun insert-for-yank-1 (string) - "Insert STRING at point for the `yank' command. -This function is like `insert', except it honors the variables -`yank-handled-properties' and `yank-excluded-properties', and the -`yank-handler' text property. - -Properties listed in `yank-handled-properties' are processed, -then those listed in `yank-excluded-properties' are discarded. - -If STRING has a non-nil `yank-handler' property on its first -character, the normal insert behavior is altered. The value of -the `yank-handler' property must be a list of one to four -elements, of the form (FUNCTION PARAM NOEXCLUDE UNDO). -FUNCTION, if non-nil, should be a function of one argument, an - object to insert; it is called instead of `insert'. -PARAM, if present and non-nil, replaces STRING as the argument to - FUNCTION or `insert'; e.g. if FUNCTION is `yank-rectangle', PARAM - may be a list of strings to insert as a rectangle. -If NOEXCLUDE is present and non-nil, the normal removal of - `yank-excluded-properties' is not performed; instead FUNCTION is - responsible for the removal. This may be necessary if FUNCTION - adjusts point before or after inserting the object. -UNDO, if present and non-nil, should be a function to be called - by `yank-pop' to undo the insertion of the current object. It is - given two arguments, the start and end of the region. FUNCTION - may set `yank-undo-function' to override UNDO." - (let* ((handler (and (stringp string) - (get-text-property 0 'yank-handler string))) - (param (or (nth 1 handler) string)) - (opoint (point)) - (inhibit-read-only inhibit-read-only) - end) - - (setq yank-undo-function t) - (if (nth 0 handler) ; FUNCTION - (funcall (car handler) param) - (insert param)) - (setq end (point)) - - ;; Prevent read-only properties from interfering with the - ;; following text property changes. - (setq inhibit-read-only t) - - (unless (nth 2 handler) ; NOEXCLUDE - (remove-yank-excluded-properties opoint end)) - - ;; If last inserted char has properties, mark them as rear-nonsticky. - (if (and (> end opoint) - (text-properties-at (1- end))) - (put-text-property (1- end) end 'rear-nonsticky t)) - - (if (eq yank-undo-function t) ; not set by FUNCTION - (setq yank-undo-function (nth 3 handler))) ; UNDO - (if (nth 4 handler) ; COMMAND - (setq this-command (nth 4 handler))))) - -(defun insert-buffer-substring-no-properties (buffer &optional start end) - "Insert before point a substring of BUFFER, without text properties. -BUFFER may be a buffer or a buffer name. -Arguments START and END are character positions specifying the substring. 
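;; A minimal sketch of the `yank-handler' mechanism described above: the
;; handler's FUNCTION receives the string and inserts an upcased copy of it.
(insert-for-yank
 (propertize "hello" 'yank-handler
             (list (lambda (s) (insert (upcase s))))))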
-They default to the values of (point-min) and (point-max) in BUFFER." - (let ((opoint (point))) - (insert-buffer-substring buffer start end) - (let ((inhibit-read-only t)) - (set-text-properties opoint (point) nil)))) - -(defun insert-buffer-substring-as-yank (buffer &optional start end) - "Insert before point a part of BUFFER, stripping some text properties. -BUFFER may be a buffer or a buffer name. -Arguments START and END are character positions specifying the substring. -They default to the values of (point-min) and (point-max) in BUFFER. -Before insertion, process text properties according to -`yank-handled-properties' and `yank-excluded-properties'." - ;; Since the buffer text should not normally have yank-handler properties, - ;; there is no need to handle them here. - (let ((opoint (point))) - (insert-buffer-substring buffer start end) - (remove-yank-excluded-properties opoint (point)))) - -(defun yank-handle-font-lock-face-property (face start end) - "If `font-lock-defaults' is nil, apply FACE as a `face' property. -START and END denote the start and end of the text to act on. -Do nothing if FACE is nil." - (and face - (null font-lock-defaults) - (put-text-property start end 'face face))) - -;; This removes `mouse-face' properties in *Help* buffer buttons: -;; http://lists.gnu.org/archive/html/emacs-devel/2002-04/msg00648.html -(defun yank-handle-category-property (category start end) - "Apply property category CATEGORY's properties between START and END." - (when category - (let ((start2 start)) - (while (< start2 end) - (let ((end2 (next-property-change start2 nil end)) - (original (text-properties-at start2))) - (set-text-properties start2 end2 (symbol-plist category)) - (add-text-properties start2 end2 original) - (setq start2 end2)))))) - - -;;;; Synchronous shell commands. - -(defun start-process-shell-command (name buffer &rest args) - "Start a program in a subprocess. Return the process object for it. -NAME is name for process. It is modified if necessary to make it unique. -BUFFER is the buffer (or buffer name) to associate with the process. - Process output goes at end of that buffer, unless you specify - an output stream or filter function to handle the output. - BUFFER may be also nil, meaning that this process is not associated - with any buffer -COMMAND is the shell command to run. - -An old calling convention accepted any number of arguments after COMMAND, -which were just concatenated to COMMAND. This is still supported but strongly -discouraged." - (declare (advertised-calling-convention (name buffer command) "23.1")) - ;; We used to use `exec' to replace the shell with the command, - ;; but that failed to handle (...) and semicolon, etc. - (start-process name buffer shell-file-name shell-command-switch - (mapconcat 'identity args " "))) - -(defun start-file-process-shell-command (name buffer &rest args) - "Start a program in a subprocess. Return the process object for it. -Similar to `start-process-shell-command', but calls `start-file-process'." - (declare (advertised-calling-convention (name buffer command) "23.1")) - (start-file-process - name buffer - (if (file-remote-p default-directory) "/bin/sh" shell-file-name) - (if (file-remote-p default-directory) "-c" shell-command-switch) - (mapconcat 'identity args " "))) - -(defun call-process-shell-command (command &optional infile buffer display - &rest args) - "Execute the shell command COMMAND synchronously in separate process. -The remaining arguments are optional. 
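;; A minimal sketch of `start-process-shell-command' using the advertised
;; three-argument calling convention; the process and buffer names are made up.
(start-process-shell-command "demo-ls" "*demo-ls*" "ls -1 | head -n 5")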
-The program's input comes from file INFILE (nil means `/dev/null'). -Insert output in BUFFER before point; t means current buffer; - nil for BUFFER means discard it; 0 means discard and don't wait. -BUFFER can also have the form (REAL-BUFFER STDERR-FILE); in that case, -REAL-BUFFER says what to do with standard output, as above, -while STDERR-FILE says what to do with standard error in the child. -STDERR-FILE may be nil (discard standard error output), -t (mix it with ordinary output), or a file name string. - -Fourth arg DISPLAY non-nil means redisplay buffer as output is inserted. -Wildcards and redirection are handled as usual in the shell. - -If BUFFER is 0, `call-process-shell-command' returns immediately with value nil. -Otherwise it waits for COMMAND to terminate and returns a numeric exit -status or a signal description string. -If you quit, the process is killed with SIGINT, or SIGKILL if you quit again. - -An old calling convention accepted any number of arguments after DISPLAY, -which were just concatenated to COMMAND. This is still supported but strongly -discouraged." - (declare (advertised-calling-convention - (command &optional infile buffer display) "24.5")) - ;; We used to use `exec' to replace the shell with the command, - ;; but that failed to handle (...) and semicolon, etc. - (call-process shell-file-name - infile buffer display - shell-command-switch - (mapconcat 'identity (cons command args) " "))) - -(defun process-file-shell-command (command &optional infile buffer display - &rest args) - "Process files synchronously in a separate process. -Similar to `call-process-shell-command', but calls `process-file'." - (declare (advertised-calling-convention - (command &optional infile buffer display) "24.5")) - (process-file - (if (file-remote-p default-directory) "/bin/sh" shell-file-name) - infile buffer display - (if (file-remote-p default-directory) "-c" shell-command-switch) - (mapconcat 'identity (cons command args) " "))) - -;;;; Lisp macros to do various things temporarily. - -(defmacro track-mouse (&rest body) - "Evaluate BODY with mouse movement events enabled. -Within a `track-mouse' form, mouse motion generates input events that - you can read with `read-event'. -Normally, mouse motion is ignored." - (declare (debug t) (indent 0)) - `(internal--track-mouse (lambda () ,@body))) - -(defmacro with-current-buffer (buffer-or-name &rest body) - "Execute the forms in BODY with BUFFER-OR-NAME temporarily current. -BUFFER-OR-NAME must be a buffer or the name of an existing buffer. -The value returned is the value of the last form in BODY. See -also `with-temp-buffer'." - (declare (indent 1) (debug t)) - `(save-current-buffer - (set-buffer ,buffer-or-name) - ,@body)) - -(defun internal--before-with-selected-window (window) - (let ((other-frame (window-frame window))) - (list window (selected-window) - ;; Selecting a window on another frame also changes that - ;; frame's frame-selected-window. We must save&restore it. - (unless (eq (selected-frame) other-frame) - (frame-selected-window other-frame)) - ;; Also remember the top-frame if on ttys. - (unless (eq (selected-frame) other-frame) - (tty-top-frame other-frame))))) - -(defun internal--after-with-selected-window (state) - ;; First reset frame-selected-window. - (when (window-live-p (nth 2 state)) - ;; We don't use set-frame-selected-window because it does not - ;; pass the `norecord' argument to Fselect_window. 
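;; A minimal sketch of `call-process-shell-command': output is inserted in
;; the current (temporary) buffer and the return value is the exit status.
(with-temp-buffer
  (call-process-shell-command "echo hello" nil t)
  (buffer-string))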
- (select-window (nth 2 state) 'norecord) - (and (frame-live-p (nth 3 state)) - (not (eq (tty-top-frame) (nth 3 state))) - (select-frame (nth 3 state) 'norecord))) - ;; Then reset the actual selected-window. - (when (window-live-p (nth 1 state)) - (select-window (nth 1 state) 'norecord))) - -(defmacro with-selected-window (window &rest body) - "Execute the forms in BODY with WINDOW as the selected window. -The value returned is the value of the last form in BODY. - -This macro saves and restores the selected window, as well as the -selected window of each frame. It does not change the order of -recently selected windows. If the previously selected window of -some frame is no longer live at the end of BODY, that frame's -selected window is left alone. If the selected window is no -longer live, then whatever window is selected at the end of BODY -remains selected. - -This macro uses `save-current-buffer' to save and restore the -current buffer, since otherwise its normal operation could -potentially make a different buffer current. It does not alter -the buffer list ordering." - (declare (indent 1) (debug t)) - `(let ((save-selected-window--state - (internal--before-with-selected-window ,window))) - (save-current-buffer - (unwind-protect - (progn (select-window (car save-selected-window--state) 'norecord) - ,@body) - (internal--after-with-selected-window save-selected-window--state))))) - -(defmacro with-selected-frame (frame &rest body) - "Execute the forms in BODY with FRAME as the selected frame. -The value returned is the value of the last form in BODY. - -This macro saves and restores the selected frame, and changes the -order of neither the recently selected windows nor the buffers in -the buffer list." - (declare (indent 1) (debug t)) - (let ((old-frame (make-symbol "old-frame")) - (old-buffer (make-symbol "old-buffer"))) - `(let ((,old-frame (selected-frame)) - (,old-buffer (current-buffer))) - (unwind-protect - (progn (select-frame ,frame 'norecord) - ,@body) - (when (frame-live-p ,old-frame) - (select-frame ,old-frame 'norecord)) - (when (buffer-live-p ,old-buffer) - (set-buffer ,old-buffer)))))) - -(defmacro save-window-excursion (&rest body) - "Execute BODY, then restore previous window configuration. -This macro saves the window configuration on the selected frame, -executes BODY, then calls `set-window-configuration' to restore -the saved window configuration. The return value is the last -form in BODY. The window configuration is also restored if BODY -exits nonlocally. - -BEWARE: Most uses of this macro introduce bugs. -E.g. it should not be used to try and prevent some code from opening -a new window, since that window may sometimes appear in another frame, -in which case `save-window-excursion' cannot help." - (declare (indent 0) (debug t)) - (let ((c (make-symbol "wconfig"))) - `(let ((,c (current-window-configuration))) - (unwind-protect (progn ,@body) - (set-window-configuration ,c))))) - -(defun internal-temp-output-buffer-show (buffer) - "Internal function for `with-output-to-temp-buffer'." - (with-current-buffer buffer - (set-buffer-modified-p nil) - (goto-char (point-min))) - - (if temp-buffer-show-function - (funcall temp-buffer-show-function buffer) - (with-current-buffer buffer - (let* ((window - (let ((window-combination-limit - ;; When `window-combination-limit' equals - ;; `temp-buffer' or `temp-buffer-resize' and - ;; `temp-buffer-resize-mode' is enabled in this - ;; buffer bind it to t so resizing steals space - ;; preferably from the window that was split. 
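;; A minimal sketch of `with-selected-window': move point in the window
;; showing *Messages*, if such a window exists, without changing the
;; selected window or the recently-selected order.
(let ((win (get-buffer-window "*Messages*")))
  (when win
    (with-selected-window win
      (goto-char (point-max)))))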
- (if (or (eq window-combination-limit 'temp-buffer) - (and (eq window-combination-limit - 'temp-buffer-resize) - temp-buffer-resize-mode)) - t - window-combination-limit))) - (display-buffer buffer))) - (frame (and window (window-frame window)))) - (when window - (unless (eq frame (selected-frame)) - (make-frame-visible frame)) - (setq minibuffer-scroll-window window) - (set-window-hscroll window 0) - ;; Don't try this with NOFORCE non-nil! - (set-window-start window (point-min) t) - ;; This should not be necessary. - (set-window-point window (point-min)) - ;; Run `temp-buffer-show-hook', with the chosen window selected. - (with-selected-window window - (run-hooks 'temp-buffer-show-hook)))))) - ;; Return nil. - nil) - -;; Doc is very similar to with-temp-buffer-window. -(defmacro with-output-to-temp-buffer (bufname &rest body) - "Bind `standard-output' to buffer BUFNAME, eval BODY, then show that buffer. - -This construct makes buffer BUFNAME empty before running BODY. -It does not make the buffer current for BODY. -Instead it binds `standard-output' to that buffer, so that output -generated with `prin1' and similar functions in BODY goes into -the buffer. - -At the end of BODY, this marks buffer BUFNAME unmodified and displays -it in a window, but does not select it. The normal way to do this is -by calling `display-buffer', then running `temp-buffer-show-hook'. -However, if `temp-buffer-show-function' is non-nil, it calls that -function instead (and does not run `temp-buffer-show-hook'). The -function gets one argument, the buffer to display. - -The return value of `with-output-to-temp-buffer' is the value of the -last form in BODY. If BODY does not finish normally, the buffer -BUFNAME is not displayed. - -This runs the hook `temp-buffer-setup-hook' before BODY, -with the buffer BUFNAME temporarily current. It runs the hook -`temp-buffer-show-hook' after displaying buffer BUFNAME, with that -buffer temporarily current, and the window that was used to display it -temporarily selected. But it doesn't run `temp-buffer-show-hook' -if it uses `temp-buffer-show-function'. - -By default, the setup hook puts the buffer into Help mode before running BODY. -If BODY does not change the major mode, the show hook makes the buffer -read-only, and scans it for function and variable names to make them into -clickable cross-references. - -See the related form `with-temp-buffer-window'." - (declare (debug t)) - (let ((old-dir (make-symbol "old-dir")) - (buf (make-symbol "buf"))) - `(let* ((,old-dir default-directory) - (,buf - (with-current-buffer (get-buffer-create ,bufname) - (prog1 (current-buffer) - (kill-all-local-variables) - ;; FIXME: delete_all_overlays - (setq default-directory ,old-dir) - (setq buffer-read-only nil) - (setq buffer-file-name nil) - (setq buffer-undo-list t) - (let ((inhibit-read-only t) - (inhibit-modification-hooks t)) - (erase-buffer) - (run-hooks 'temp-buffer-setup-hook))))) - (standard-output ,buf)) - (prog1 (progn ,@body) - (internal-temp-output-buffer-show ,buf))))) - -(defmacro with-temp-file (file &rest body) - "Create a new buffer, evaluate BODY there, and write the buffer to FILE. -The value returned is the value of the last form in BODY. -See also `with-temp-buffer'." 
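;; A minimal sketch of `with-output-to-temp-buffer'; the buffer name is made
;; up, and everything printed to `standard-output' lands in that buffer.
(with-output-to-temp-buffer "*demo-output*"
  (prin1 (list 1 2 3))
  (terpri)
  (princ "done"))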
- (declare (indent 1) (debug t)) - (let ((temp-file (make-symbol "temp-file")) - (temp-buffer (make-symbol "temp-buffer"))) - `(let ((,temp-file ,file) - (,temp-buffer - (get-buffer-create (generate-new-buffer-name " *temp file*")))) - (unwind-protect - (prog1 - (with-current-buffer ,temp-buffer - ,@body) - (with-current-buffer ,temp-buffer - (write-region nil nil ,temp-file nil 0))) - (and (buffer-name ,temp-buffer) - (kill-buffer ,temp-buffer)))))) - -(defmacro with-temp-message (message &rest body) - "Display MESSAGE temporarily if non-nil while BODY is evaluated. -The original message is restored to the echo area after BODY has finished. -The value returned is the value of the last form in BODY. -MESSAGE is written to the message log buffer if `message-log-max' is non-nil. -If MESSAGE is nil, the echo area and message log buffer are unchanged. -Use a MESSAGE of \"\" to temporarily clear the echo area." - (declare (debug t) (indent 1)) - (let ((current-message (make-symbol "current-message")) - (temp-message (make-symbol "with-temp-message"))) - `(let ((,temp-message ,message) - (,current-message)) - (unwind-protect - (progn - (when ,temp-message - (setq ,current-message (current-message)) - (message "%s" ,temp-message)) - ,@body) - (and ,temp-message - (if ,current-message - (message "%s" ,current-message) - (message nil))))))) - -(defmacro with-temp-buffer (&rest body) - "Create a temporary buffer, and evaluate BODY there like `progn'. -See also `with-temp-file' and `with-output-to-string'." - (declare (indent 0) (debug t)) - (let ((temp-buffer (make-symbol "temp-buffer"))) - `(let ((,temp-buffer (generate-new-buffer " *temp*"))) - ;; FIXME: kill-buffer can change current-buffer in some odd cases. - (with-current-buffer ,temp-buffer - (unwind-protect - (progn ,@body) - (and (buffer-name ,temp-buffer) - (kill-buffer ,temp-buffer))))))) - -(defmacro with-silent-modifications (&rest body) - "Execute BODY, pretending it does not modify the buffer. -If BODY performs real modifications to the buffer's text, other -than cosmetic ones, undo data may become corrupted. - -This macro will run BODY normally, but doesn't count its buffer -modifications as being buffer modifications. This affects things -like `buffer-modified-p', checking whether the file is locked by -someone else, running buffer modification hooks, and other things -of that nature. - -Typically used around modifications of text-properties which do -not really affect the buffer's content." - (declare (debug t) (indent 0)) - (let ((modified (make-symbol "modified"))) - `(let* ((,modified (buffer-modified-p)) - (buffer-undo-list t) - (inhibit-read-only t) - (inhibit-modification-hooks t)) - (unwind-protect - (progn - ,@body) - (unless ,modified - (restore-buffer-modified-p nil)))))) - -(defmacro with-output-to-string (&rest body) - "Execute BODY, return the text it sent to `standard-output', as a string." - (declare (indent 0) (debug t)) - `(let ((standard-output - (get-buffer-create (generate-new-buffer-name " *string-output*")))) - (unwind-protect - (progn - (let ((standard-output standard-output)) - ,@body) - (with-current-buffer standard-output - (buffer-string))) - (kill-buffer standard-output)))) - -(defmacro with-local-quit (&rest body) - "Execute BODY, allowing quits to terminate BODY but not escape further. -When a quit terminates BODY, `with-local-quit' returns nil but -requests another quit. That quit will be processed as soon as quitting -is allowed once again. 
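;; A minimal sketch combining `with-temp-file' and `with-temp-buffer': write
;; a string to a freshly created temporary file, then read it back.
(let ((file (make-temp-file "subr-demo")))
  (with-temp-file file
    (insert "first line\n"))
  (with-temp-buffer
    (insert-file-contents file)
    (buffer-string)))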
(Immediately, if `inhibit-quit' is nil.)" - (declare (debug t) (indent 0)) - `(condition-case nil - (let ((inhibit-quit nil)) - ,@body) - (quit (setq quit-flag t) - ;; This call is to give a chance to handle quit-flag - ;; in case inhibit-quit is nil. - ;; Without this, it will not be handled until the next function - ;; call, and that might allow it to exit thru a condition-case - ;; that intends to handle the quit signal next time. - (eval '(ignore nil))))) - -(defmacro while-no-input (&rest body) - "Execute BODY only as long as there's no pending input. -If input arrives, that ends the execution of BODY, -and `while-no-input' returns t. Quitting makes it return nil. -If BODY finishes, `while-no-input' returns whatever value BODY produced." - (declare (debug t) (indent 0)) - (let ((catch-sym (make-symbol "input"))) - `(with-local-quit - (catch ',catch-sym - (let ((throw-on-input ',catch-sym)) - (or (input-pending-p) - (progn ,@body))))))) - -(defmacro condition-case-unless-debug (var bodyform &rest handlers) - "Like `condition-case' except that it does not prevent debugging. -More specifically if `debug-on-error' is set then the debugger will be invoked -even if this catches the signal." - (declare (debug condition-case) (indent 2)) - `(condition-case ,var - ,bodyform - ,@(mapcar (lambda (handler) - `((debug ,@(if (listp (car handler)) (car handler) - (list (car handler)))) - ,@(cdr handler))) - handlers))) - -(define-obsolete-function-alias 'condition-case-no-debug - 'condition-case-unless-debug "24.1") - -(defmacro with-demoted-errors (format &rest body) - "Run BODY and demote any errors to simple messages. -FORMAT is a string passed to `message' to format any error message. -It should contain a single %-sequence; e.g., \"Error: %S\". - -If `debug-on-error' is non-nil, run BODY without catching its errors. -This is to be used around code which is not expected to signal an error -but which should be robust in the unexpected case that an error is signaled. - -For backward compatibility, if FORMAT is not a constant string, it -is assumed to be part of BODY, in which case the message format -used is \"Error: %S\"." - (declare (debug t) (indent 1)) - (let ((err (make-symbol "err")) - (format (if (and (stringp format) body) format - (prog1 "Error: %S" - (if format (push format body)))))) - `(condition-case-unless-debug ,err - ,(macroexp-progn body) - (error (message ,format ,err) nil)))) - -(defmacro combine-after-change-calls (&rest body) - "Execute BODY, but don't call the after-change functions till the end. -If BODY makes changes in the buffer, they are recorded -and the functions on `after-change-functions' are called several times -when BODY is finished. -The return value is the value of the last form in BODY. - -If `before-change-functions' is non-nil, then calls to the after-change -functions can't be deferred, so in that case this macro has no effect. - -Do not alter `after-change-functions' or `before-change-functions' -in BODY." - (declare (indent 0) (debug t)) - `(unwind-protect - (let ((combine-after-change-calls t)) - . ,body) - (combine-after-change-execute))) - -(defmacro with-case-table (table &rest body) - "Execute the forms in BODY with TABLE as the current case table. -The value returned is the value of the last form in BODY." 
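;; A minimal sketch of `with-demoted-errors': the error below is reported as
;; a message instead of being signaled, and the whole form returns nil.
(with-demoted-errors "Demo error: %S"
  (error "boom"))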
- (declare (indent 1) (debug t)) - (let ((old-case-table (make-symbol "table")) - (old-buffer (make-symbol "buffer"))) - `(let ((,old-case-table (current-case-table)) - (,old-buffer (current-buffer))) - (unwind-protect - (progn (set-case-table ,table) - ,@body) - (with-current-buffer ,old-buffer - (set-case-table ,old-case-table)))))) - -(defmacro with-file-modes (modes &rest body) - "Execute BODY with default file permissions temporarily set to MODES. -MODES is as for `set-default-file-modes'." - (declare (indent 1) (debug t)) - (let ((umask (make-symbol "umask"))) - `(let ((,umask (default-file-modes))) - (unwind-protect - (progn - (set-default-file-modes ,modes) - ,@body) - (set-default-file-modes ,umask))))) - - -;;; Matching and match data. - -(defvar save-match-data-internal) - -;; We use save-match-data-internal as the local variable because -;; that works ok in practice (people should not use that variable elsewhere). -;; We used to use an uninterned symbol; the compiler handles that properly -;; now, but it generates slower code. -(defmacro save-match-data (&rest body) - "Execute the BODY forms, restoring the global value of the match data. -The value returned is the value of the last form in BODY." - ;; It is better not to use backquote here, - ;; because that makes a bootstrapping problem - ;; if you need to recompile all the Lisp files using interpreted code. - (declare (indent 0) (debug t)) - (list 'let - '((save-match-data-internal (match-data))) - (list 'unwind-protect - (cons 'progn body) - ;; It is safe to free (evaporate) markers immediately here, - ;; as Lisp programs should not copy from save-match-data-internal. - '(set-match-data save-match-data-internal 'evaporate)))) - -(defun match-string (num &optional string) - "Return string of text matched by last search. -NUM specifies which parenthesized expression in the last regexp. - Value is nil if NUMth pair didn't match, or there were less than NUM pairs. -Zero means the entire text matched by the whole regexp or whole string. -STRING should be given if the last search was by `string-match' on STRING. -If STRING is nil, the current buffer should be the same buffer -the search/match was performed in." - (if (match-beginning num) - (if string - (substring string (match-beginning num) (match-end num)) - (buffer-substring (match-beginning num) (match-end num))))) - -(defun match-string-no-properties (num &optional string) - "Return string of text matched by last search, without text properties. -NUM specifies which parenthesized expression in the last regexp. - Value is nil if NUMth pair didn't match, or there were less than NUM pairs. -Zero means the entire text matched by the whole regexp or whole string. -STRING should be given if the last search was by `string-match' on STRING. -If STRING is nil, the current buffer should be the same buffer -the search/match was performed in." - (if (match-beginning num) - (if string - (substring-no-properties string (match-beginning num) - (match-end num)) - (buffer-substring-no-properties (match-beginning num) - (match-end num))))) - - -(defun match-substitute-replacement (replacement - &optional fixedcase literal string subexp) - "Return REPLACEMENT as it will be inserted by `replace-match'. -In other words, all back-references in the form `\\&' and `\\N' -are substituted with actual strings matched by the last search. -Optional FIXEDCASE, LITERAL, STRING and SUBEXP have the same -meaning as for `replace-match'." 
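;; A minimal sketch of `string-match' plus `match-string' on a string; group
;; 1 is "2019" here, and `save-match-data' keeps the caller's match data intact.
(save-match-data
  (when (string-match "\\([0-9]+\\)" "report-2019.txt")
    (match-string 1 "report-2019.txt")))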
- (let ((match (match-string 0 string))) - (save-match-data - (set-match-data (mapcar (lambda (x) - (if (numberp x) - (- x (match-beginning 0)) - x)) - (match-data t))) - (replace-match replacement fixedcase literal match subexp)))) - - -(defun looking-back (regexp &optional limit greedy) - "Return non-nil if text before point matches regular expression REGEXP. -Like `looking-at' except matches before point, and is slower. -LIMIT if non-nil speeds up the search by specifying a minimum -starting position, to avoid checking matches that would start -before LIMIT. - -If GREEDY is non-nil, extend the match backwards as far as -possible, stopping when a single additional previous character -cannot be part of a match for REGEXP. When the match is -extended, its starting position is allowed to occur before -LIMIT. - -As a general recommendation, try to avoid using `looking-back' -wherever possible, since it is slow." - (let ((start (point)) - (pos - (save-excursion - (and (re-search-backward (concat "\\(?:" regexp "\\)\\=") limit t) - (point))))) - (if (and greedy pos) - (save-restriction - (narrow-to-region (point-min) start) - (while (and (> pos (point-min)) - (save-excursion - (goto-char pos) - (backward-char 1) - (looking-at (concat "\\(?:" regexp "\\)\\'")))) - (setq pos (1- pos))) - (save-excursion - (goto-char pos) - (looking-at (concat "\\(?:" regexp "\\)\\'"))))) - (not (null pos)))) - -(defsubst looking-at-p (regexp) - "\ -Same as `looking-at' except this function does not change the match data." - (let ((inhibit-changing-match-data t)) - (looking-at regexp))) - -(defsubst string-match-p (regexp string &optional start) - "\ -Same as `string-match' except this function does not change the match data." - (let ((inhibit-changing-match-data t)) - (string-match regexp string start))) - -(defun subregexp-context-p (regexp pos &optional start) - "Return non-nil if POS is in a normal subregexp context in REGEXP. -A subregexp context is one where a sub-regexp can appear. -A non-subregexp context is for example within brackets, or within a -repetition bounds operator `\\=\\{...\\}', or right after a `\\'. -If START is non-nil, it should be a position in REGEXP, smaller -than POS, and known to be in a subregexp context." - ;; Here's one possible implementation, with the great benefit that it - ;; reuses the regexp-matcher's own parser, so it understands all the - ;; details of the syntax. A disadvantage is that it needs to match the - ;; error string. - (condition-case err - (progn - (string-match (substring regexp (or start 0) pos) "") - t) - (invalid-regexp - (not (member (cadr err) '("Unmatched [ or [^" - "Unmatched \\{" - "Trailing backslash"))))) - ;; An alternative implementation: - ;; (defconst re-context-re - ;; (let* ((harmless-ch "[^\\[]") - ;; (harmless-esc "\\\\[^{]") - ;; (class-harmless-ch "[^][]") - ;; (class-lb-harmless "[^]:]") - ;; (class-lb-colon-maybe-charclass ":\\([a-z]+:]\\)?") - ;; (class-lb (concat "\\[\\(" class-lb-harmless - ;; "\\|" class-lb-colon-maybe-charclass "\\)")) - ;; (class - ;; (concat "\\[^?]?" 
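;; A minimal sketch of `string-match-p': same answer as `string-match' (the
;; match position, 4 here), but the global match data is left untouched.
(string-match-p "\\.el\\'" "subr.el")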
- ;; "\\(" class-harmless-ch - ;; "\\|" class-lb "\\)*" - ;; "\\[?]")) ; special handling for bare [ at end of re - ;; (braces "\\\\{[0-9,]+\\\\}")) - ;; (concat "\\`\\(" harmless-ch "\\|" harmless-esc - ;; "\\|" class "\\|" braces "\\)*\\'")) - ;; "Matches any prefix that corresponds to a normal subregexp context.") - ;; (string-match re-context-re (substring regexp (or start 0) pos)) - ) - -;;;; split-string - -(defconst split-string-default-separators "[ \f\t\n\r\v]+" - "The default value of separators for `split-string'. - -A regexp matching strings of whitespace. May be locale-dependent -\(as yet unimplemented). Should not match non-breaking spaces. - -Warning: binding this to a different value and using it as default is -likely to have undesired semantics.") - -;; The specification says that if both SEPARATORS and OMIT-NULLS are -;; defaulted, OMIT-NULLS should be treated as t. Simplifying the logical -;; expression leads to the equivalent implementation that if SEPARATORS -;; is defaulted, OMIT-NULLS is treated as t. -(defun split-string (string &optional separators omit-nulls trim) - "Split STRING into substrings bounded by matches for SEPARATORS. - -The beginning and end of STRING, and each match for SEPARATORS, are -splitting points. The substrings matching SEPARATORS are removed, and -the substrings between the splitting points are collected as a list, -which is returned. - -If SEPARATORS is non-nil, it should be a regular expression matching text -which separates, but is not part of, the substrings. If nil it defaults to -`split-string-default-separators', normally \"[ \\f\\t\\n\\r\\v]+\", and -OMIT-NULLS is forced to t. - -If OMIT-NULLS is t, zero-length substrings are omitted from the list (so -that for the default value of SEPARATORS leading and trailing whitespace -are effectively trimmed). If nil, all zero-length substrings are retained, -which correctly parses CSV format, for example. - -If TRIM is non-nil, it should be a regular expression to match -text to trim from the beginning and end of each substring. If trimming -makes the substring empty, it is treated as null. - -If you want to trim whitespace from the substrings, the reliably correct -way is using TRIM. Making SEPARATORS match that whitespace gives incorrect -results when there is whitespace at the start or end of STRING. If you -see such calls to `split-string', please fix them. - -Note that the effect of `(split-string STRING)' is the same as -`(split-string STRING split-string-default-separators t)'. In the rare -case that you wish to retain zero-length substrings when splitting on -whitespace, use `(split-string STRING split-string-default-separators)'. - -Modifies the match data; use `save-match-data' if necessary." - (let* ((keep-nulls (not (if separators omit-nulls t))) - (rexp (or separators split-string-default-separators)) - (start 0) - this-start this-end - notfirst - (list nil) - (push-one - ;; Push the substring in range THIS-START to THIS-END - ;; onto LIST, trimming it and perhaps discarding it. - (lambda () - (when trim - ;; Discard the trim from start of this substring. - (let ((tem (string-match trim string this-start))) - (and (eq tem this-start) - (setq this-start (match-end 0))))) - - (when (or keep-nulls (< this-start this-end)) - (let ((this (substring string this-start this-end))) - - ;; Discard the trim from end of this substring. 
- (when trim - (let ((tem (string-match (concat trim "\\'") this 0))) - (and tem (< tem (length this)) - (setq this (substring this 0 tem))))) - - ;; Trimming could make it empty; check again. - (when (or keep-nulls (> (length this) 0)) - (push this list))))))) - - (while (and (string-match rexp string - (if (and notfirst - (= start (match-beginning 0)) - (< start (length string))) - (1+ start) start)) - (< start (length string))) - (setq notfirst t) - (setq this-start start this-end (match-beginning 0) - start (match-end 0)) - - (funcall push-one)) - - ;; Handle the substring at the end of STRING. - (setq this-start start this-end (length string)) - (funcall push-one) - - (nreverse list))) - -(defun combine-and-quote-strings (strings &optional separator) - "Concatenate the STRINGS, adding the SEPARATOR (default \" \"). -This tries to quote the strings to avoid ambiguity such that - (split-string-and-unquote (combine-and-quote-strings strs)) == strs -Only some SEPARATORs will work properly." - (let* ((sep (or separator " ")) - (re (concat "[\\\"]" "\\|" (regexp-quote sep)))) - (mapconcat - (lambda (str) - (if (string-match re str) - (concat "\"" (replace-regexp-in-string "[\\\"]" "\\\\\\&" str) "\"") - str)) - strings sep))) - -(defun split-string-and-unquote (string &optional separator) - "Split the STRING into a list of strings. -It understands Emacs Lisp quoting within STRING, such that - (split-string-and-unquote (combine-and-quote-strings strs)) == strs -The SEPARATOR regexp defaults to \"\\s-+\"." - (let ((sep (or separator "\\s-+")) - (i (string-match "\"" string))) - (if (null i) - (split-string string sep t) ; no quoting: easy - (append (unless (eq i 0) (split-string (substring string 0 i) sep t)) - (let ((rfs (read-from-string string i))) - (cons (car rfs) - (split-string-and-unquote (substring string (cdr rfs)) - sep))))))) - - -;;;; Replacement in strings. - -(defun subst-char-in-string (fromchar tochar string &optional inplace) - "Replace FROMCHAR with TOCHAR in STRING each time it occurs. -Unless optional argument INPLACE is non-nil, return a new string." - (let ((i (length string)) - (newstr (if inplace string (copy-sequence string)))) - (while (> i 0) - (setq i (1- i)) - (if (eq (aref newstr i) fromchar) - (aset newstr i tochar))) - newstr)) - -(defun replace-regexp-in-string (regexp rep string &optional - fixedcase literal subexp start) - "Replace all matches for REGEXP with REP in STRING. - -Return a new string containing the replacements. - -Optional arguments FIXEDCASE, LITERAL and SUBEXP are like the -arguments with the same names of function `replace-match'. If START -is non-nil, start replacements at that index in STRING. - -REP is either a string used as the NEWTEXT arg of `replace-match' or a -function. If it is a function, it is called with the actual text of each -match, and its value is used as the replacement text. When REP is called, -the match data are the result of matching REGEXP against a substring -of STRING. - -To replace only the first match (if any), make REGEXP match up to \\' -and replace a sub-expression, e.g. - (replace-regexp-in-string \"\\\\(foo\\\\).*\\\\'\" \"bar\" \" foo foo\" nil nil 1) - => \" bar foo\"" - - ;; To avoid excessive consing from multiple matches in long strings, - ;; don't just call `replace-match' continually. Walk down the - ;; string looking for matches of REGEXP and building up a (reversed) - ;; list MATCHES. This comprises segments of STRING which weren't - ;; matched interspersed with replacements for segments that were. 
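;; A minimal sketch of `replace-regexp-in-string' with a function as REP:
;; every number in the input is doubled, giving "6 plus 8".
(replace-regexp-in-string
 "[0-9]+"
 (lambda (m) (number-to-string (* 2 (string-to-number m))))
 "3 plus 4")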
- ;; [For a `large' number of replacements it's more efficient to - ;; operate in a temporary buffer; we can't tell from the function's - ;; args whether to choose the buffer-based implementation, though it - ;; might be reasonable to do so for long enough STRING.] - (let ((l (length string)) - (start (or start 0)) - matches str mb me) - (save-match-data - (while (and (< start l) (string-match regexp string start)) - (setq mb (match-beginning 0) - me (match-end 0)) - ;; If we matched the empty string, make sure we advance by one char - (when (= me mb) (setq me (min l (1+ mb)))) - ;; Generate a replacement for the matched substring. - ;; Operate only on the substring to minimize string consing. - ;; Set up match data for the substring for replacement; - ;; presumably this is likely to be faster than munging the - ;; match data directly in Lisp. - (string-match regexp (setq str (substring string mb me))) - (setq matches - (cons (replace-match (if (stringp rep) - rep - (funcall rep (match-string 0 str))) - fixedcase literal str subexp) - (cons (substring string start mb) ; unmatched prefix - matches))) - (setq start me)) - ;; Reconstruct a string from the pieces. - (setq matches (cons (substring string start l) matches)) ; leftover - (apply #'concat (nreverse matches))))) - -(defun string-prefix-p (prefix string &optional ignore-case) - "Return non-nil if PREFIX is a prefix of STRING. -If IGNORE-CASE is non-nil, the comparison is done without paying attention -to case differences." - (let ((prefix-length (length prefix))) - (if (> prefix-length (length string)) nil - (eq t (compare-strings prefix 0 prefix-length string - 0 prefix-length ignore-case))))) - -(defun string-suffix-p (suffix string &optional ignore-case) - "Return non-nil if SUFFIX is a suffix of STRING. -If IGNORE-CASE is non-nil, the comparison is done without paying -attention to case differences." - (let ((start-pos (- (length string) (length suffix)))) - (and (>= start-pos 0) - (eq t (compare-strings suffix nil nil - string start-pos nil ignore-case))))) - -(defun bidi-string-mark-left-to-right (str) - "Return a string that can be safely inserted in left-to-right text. - -Normally, inserting a string with right-to-left (RTL) script into -a buffer may cause some subsequent text to be displayed as part -of the RTL segment (usually this affects punctuation characters). -This function returns a string which displays as STR but forces -subsequent text to be displayed as left-to-right. - -If STR contains any RTL character, this function returns a string -consisting of STR followed by an invisible left-to-right mark -\(LRM) character. Otherwise, it returns STR." - (unless (stringp str) - (signal 'wrong-type-argument (list 'stringp str))) - (if (string-match "\\cR" str) - (concat str (propertize (string ?\x200e) 'invisible t)) - str)) - -;;;; Specifying things to do later. - -(defun load-history-regexp (file) - "Form a regexp to find FILE in `load-history'. -FILE, a string, is described in the function `eval-after-load'." - (if (file-name-absolute-p file) - (setq file (file-truename file))) - (concat (if (file-name-absolute-p file) "\\`" "\\(\\`\\|/\\)") - (regexp-quote file) - (if (file-name-extension file) - "" - ;; Note: regexp-opt can't be used here, since we need to call - ;; this before Emacs has been fully started. 
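;; A minimal sketch of the case-insensitive forms of `string-prefix-p' and
;; `string-suffix-p'; both return t for this made-up file name.
(string-prefix-p "subr" "SUBR.EL" t)
(string-suffix-p ".el" "SUBR.EL" t)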
2006-05-21 - (concat "\\(" (mapconcat 'regexp-quote load-suffixes "\\|") "\\)?")) - "\\(" (mapconcat 'regexp-quote jka-compr-load-suffixes "\\|") - "\\)?\\'")) - -(defun load-history-filename-element (file-regexp) - "Get the first elt of `load-history' whose car matches FILE-REGEXP. -Return nil if there isn't one." - (let* ((loads load-history) - (load-elt (and loads (car loads)))) - (save-match-data - (while (and loads - (or (null (car load-elt)) - (not (string-match file-regexp (car load-elt))))) - (setq loads (cdr loads) - load-elt (and loads (car loads))))) - load-elt)) - -(put 'eval-after-load 'lisp-indent-function 1) -(defun eval-after-load (file form) - "Arrange that if FILE is loaded, FORM will be run immediately afterwards. -If FILE is already loaded, evaluate FORM right now. -FORM can be an Elisp expression (in which case it's passed to `eval'), -or a function (in which case it's passed to `funcall' with no argument). - -If a matching file is loaded again, FORM will be evaluated again. - -If FILE is a string, it may be either an absolute or a relative file -name, and may have an extension (e.g. \".el\") or may lack one, and -additionally may or may not have an extension denoting a compressed -format (e.g. \".gz\"). - -When FILE is absolute, this first converts it to a true name by chasing -symbolic links. Only a file of this name (see next paragraph regarding -extensions) will trigger the evaluation of FORM. When FILE is relative, -a file whose absolute true name ends in FILE will trigger evaluation. - -When FILE lacks an extension, a file name with any extension will trigger -evaluation. Otherwise, its extension must match FILE's. A further -extension for a compressed format (e.g. \".gz\") on FILE will not affect -this name matching. - -Alternatively, FILE can be a feature (i.e. a symbol), in which case FORM -is evaluated at the end of any file that `provide's this feature. -If the feature is provided when evaluating code not associated with a -file, FORM is evaluated immediately after the provide statement. - -Usually FILE is just a library name like \"font-lock\" or a feature name -like 'font-lock. - -This function makes or adds to an entry on `after-load-alist'." - (declare (compiler-macro - (lambda (whole) - (if (eq 'quote (car-safe form)) - ;; Quote with lambda so the compiler can look inside. - `(eval-after-load ,file (lambda () ,(nth 1 form))) - whole)))) - ;; Add this FORM into after-load-alist (regardless of whether we'll be - ;; evaluating it now). - (let* ((regexp-or-feature - (if (stringp file) - (setq file (purecopy (load-history-regexp file))) - file)) - (elt (assoc regexp-or-feature after-load-alist)) - (func - (if (functionp form) form - ;; Try to use the "current" lexical/dynamic mode for `form'. - (eval `(lambda () ,form) lexical-binding)))) - (unless elt - (setq elt (list regexp-or-feature)) - (push elt after-load-alist)) - ;; Is there an already loaded file whose name (or `provide' name) - ;; matches FILE? - (prog1 (if (if (stringp file) - (load-history-filename-element regexp-or-feature) - (featurep file)) - (funcall func)) - (let ((delayed-func - (if (not (symbolp regexp-or-feature)) func - ;; For features, the after-load-alist elements get run when - ;; `provide' is called rather than at the end of the file. - ;; So add an indirection to make sure that `func' is really run - ;; "after-load" in case the provide call happens early. - (lambda () - (if (not load-file-name) - ;; Not being provided from a file, run func right now. 
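;; A minimal sketch of `eval-after-load' with FORM given as a function, as
;; the docstring above allows; `wid-edit' is just an example feature name.
(eval-after-load 'wid-edit
  (lambda () (message "widget library is available")))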
- (funcall func) - (let ((lfn load-file-name) - ;; Don't use letrec, because equal (in - ;; add/remove-hook) would get trapped in a cycle. - (fun (make-symbol "eval-after-load-helper"))) - (fset fun (lambda (file) - (when (equal file lfn) - (remove-hook 'after-load-functions fun) - (funcall func)))) - (add-hook 'after-load-functions fun 'append))))))) - ;; Add FORM to the element unless it's already there. - (unless (member delayed-func (cdr elt)) - (nconc elt (list delayed-func))))))) - -(defmacro with-eval-after-load (file &rest body) - "Execute BODY after FILE is loaded. -FILE is normally a feature name, but it can also be a file name, -in case that file does not provide any feature." - (declare (indent 1) (debug t)) - `(eval-after-load ,file (lambda () ,@body))) - -(defvar after-load-functions nil - "Special hook run after loading a file. -Each function there is called with a single argument, the absolute -name of the file just loaded.") - -(defun do-after-load-evaluation (abs-file) - "Evaluate all `eval-after-load' forms, if any, for ABS-FILE. -ABS-FILE, a string, should be the absolute true name of a file just loaded. -This function is called directly from the C code." - ;; Run the relevant eval-after-load forms. - (dolist (a-l-element after-load-alist) - (when (and (stringp (car a-l-element)) - (string-match-p (car a-l-element) abs-file)) - ;; discard the file name regexp - (mapc #'funcall (cdr a-l-element)))) - ;; Complain when the user uses obsolete files. - (when (save-match-data - (and (string-match "/obsolete/\\([^/]*\\)\\'" abs-file) - (not (equal "loaddefs.el" (match-string 1 abs-file))))) - ;; Maybe we should just use display-warning? This seems yucky... - (let* ((file (file-name-nondirectory abs-file)) - (msg (format "Package %s is obsolete!" - (substring file 0 - (string-match "\\.elc?\\>" file))))) - ;; Cribbed from cl--compiling-file. - (if (and (boundp 'byte-compile--outbuffer) - (bufferp (symbol-value 'byte-compile--outbuffer)) - (equal (buffer-name (symbol-value 'byte-compile--outbuffer)) - " *Compiler Output*")) - ;; Don't warn about obsolete files using other obsolete files. - (unless (and (stringp byte-compile-current-file) - (string-match-p "/obsolete/[^/]*\\'" - (expand-file-name - byte-compile-current-file - byte-compile-root-dir))) - (byte-compile-log-warning msg)) - (run-with-timer 0 nil - (lambda (msg) - (message "%s" msg)) - msg)))) - - ;; Finally, run any other hook. - (run-hook-with-args 'after-load-functions abs-file)) - -(defun eval-next-after-load (file) - "Read the following input sexp, and run it whenever FILE is loaded. -This makes or adds to an entry on `after-load-alist'. -FILE should be the name of a library, with no directory name." - (declare (obsolete eval-after-load "23.2")) - (eval-after-load file (read))) - - -(defun display-delayed-warnings () - "Display delayed warnings from `delayed-warnings-list'. -Used from `delayed-warnings-hook' (which see)." - (dolist (warning (nreverse delayed-warnings-list)) - (apply 'display-warning warning)) - (setq delayed-warnings-list nil)) - -(defun collapse-delayed-warnings () - "Remove duplicates from `delayed-warnings-list'. -Collapse identical adjacent warnings into one (plus count). -Used from `delayed-warnings-hook' (which see)." 
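;; The same arrangement written with the `with-eval-after-load' macro, which
;; simply wraps its body in a lambda for `eval-after-load'.
(with-eval-after-load 'wid-edit
  (message "widget library is available"))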
- (let ((count 1) - collapsed warning) - (while delayed-warnings-list - (setq warning (pop delayed-warnings-list)) - (if (equal warning (car delayed-warnings-list)) - (setq count (1+ count)) - (when (> count 1) - (setcdr warning (cons (format "%s [%d times]" (cadr warning) count) - (cddr warning))) - (setq count 1)) - (push warning collapsed))) - (setq delayed-warnings-list (nreverse collapsed)))) - -;; At present this is only used for Emacs internals. -;; Ref http://lists.gnu.org/archive/html/emacs-devel/2012-02/msg00085.html -(defvar delayed-warnings-hook '(collapse-delayed-warnings - display-delayed-warnings) - "Normal hook run to process and display delayed warnings. -By default, this hook contains functions to consolidate the -warnings listed in `delayed-warnings-list', display them, and set -`delayed-warnings-list' back to nil.") - -(defun delay-warning (type message &optional level buffer-name) - "Display a delayed warning. -Aside from going through `delayed-warnings-list', this is equivalent -to `display-warning'." - (push (list type message level buffer-name) delayed-warnings-list)) - - -;;;; invisibility specs - -(defun add-to-invisibility-spec (element) - "Add ELEMENT to `buffer-invisibility-spec'. -See documentation for `buffer-invisibility-spec' for the kind of elements -that can be added." - (if (eq buffer-invisibility-spec t) - (setq buffer-invisibility-spec (list t))) - (setq buffer-invisibility-spec - (cons element buffer-invisibility-spec))) - -(defun remove-from-invisibility-spec (element) - "Remove ELEMENT from `buffer-invisibility-spec'." - (if (consp buffer-invisibility-spec) - (setq buffer-invisibility-spec - (delete element buffer-invisibility-spec)))) - -;;;; Syntax tables. - -(defmacro with-syntax-table (table &rest body) - "Evaluate BODY with syntax table of current buffer set to TABLE. -The syntax table of the current buffer is saved, BODY is evaluated, and the -saved table is restored, even in case of an abnormal exit. -Value is what BODY returns." - (declare (debug t) (indent 1)) - (let ((old-table (make-symbol "table")) - (old-buffer (make-symbol "buffer"))) - `(let ((,old-table (syntax-table)) - (,old-buffer (current-buffer))) - (unwind-protect - (progn - (set-syntax-table ,table) - ,@body) - (save-current-buffer - (set-buffer ,old-buffer) - (set-syntax-table ,old-table)))))) - -(defun make-syntax-table (&optional oldtable) - "Return a new syntax table. -Create a syntax table which inherits from OLDTABLE (if non-nil) or -from `standard-syntax-table' otherwise." - (let ((table (make-char-table 'syntax-table nil))) - (set-char-table-parent table (or oldtable (standard-syntax-table))) - table)) - -(defun syntax-after (pos) - "Return the raw syntax descriptor for the char after POS. -If POS is outside the buffer's accessible portion, return nil." - (unless (or (< pos (point-min)) (>= pos (point-max))) - (let ((st (if parse-sexp-lookup-properties - (get-char-property pos 'syntax-table)))) - (if (consp st) st - (aref (or st (syntax-table)) (char-after pos)))))) - -(defun syntax-class (syntax) - "Return the code for the syntax class described by SYNTAX. - -SYNTAX should be a raw syntax descriptor; the return value is a -integer which encodes the corresponding syntax class. See Info -node `(elisp)Syntax Table Internals' for a list of codes. - -If SYNTAX is nil, return nil." - (and syntax (logand (car syntax) 65535))) - -;; Utility motion commands - -;; Whitespace - -(defun forward-whitespace (arg) - "Move point to the end of the next sequence of whitespace chars. 
-Each such sequence may be a single newline, or a sequence of -consecutive space and/or tab characters. -With prefix argument ARG, do it ARG times if positive, or move -backwards ARG times if negative." - (interactive "^p") - (if (natnump arg) - (re-search-forward "[ \t]+\\|\n" nil 'move arg) - (while (< arg 0) - (if (re-search-backward "[ \t]+\\|\n" nil 'move) - (or (eq (char-after (match-beginning 0)) ?\n) - (skip-chars-backward " \t"))) - (setq arg (1+ arg))))) - -;; Symbols - -(defun forward-symbol (arg) - "Move point to the next position that is the end of a symbol. -A symbol is any sequence of characters that are in either the -word constituent or symbol constituent syntax class. -With prefix argument ARG, do it ARG times if positive, or move -backwards ARG times if negative." - (interactive "^p") - (if (natnump arg) - (re-search-forward "\\(\\sw\\|\\s_\\)+" nil 'move arg) - (while (< arg 0) - (if (re-search-backward "\\(\\sw\\|\\s_\\)+" nil 'move) - (skip-syntax-backward "w_")) - (setq arg (1+ arg))))) - -;; Syntax blocks - -(defun forward-same-syntax (&optional arg) - "Move point past all characters with the same syntax class. -With prefix argument ARG, do it ARG times if positive, or move -backwards ARG times if negative." - (interactive "^p") - (or arg (setq arg 1)) - (while (< arg 0) - (skip-syntax-backward - (char-to-string (char-syntax (char-before)))) - (setq arg (1+ arg))) - (while (> arg 0) - (skip-syntax-forward (char-to-string (char-syntax (char-after)))) - (setq arg (1- arg)))) - - -;;;; Text clones - -(defvar text-clone--maintaining nil) - -(defun text-clone--maintain (ol1 after beg end &optional _len) - "Propagate the changes made under the overlay OL1 to the other clones. -This is used on the `modification-hooks' property of text clones." - (when (and after (not undo-in-progress) - (not text-clone--maintaining) - (overlay-start ol1)) - (let ((margin (if (overlay-get ol1 'text-clone-spreadp) 1 0))) - (setq beg (max beg (+ (overlay-start ol1) margin))) - (setq end (min end (- (overlay-end ol1) margin))) - (when (<= beg end) - (save-excursion - (when (overlay-get ol1 'text-clone-syntax) - ;; Check content of the clone's text. - (let ((cbeg (+ (overlay-start ol1) margin)) - (cend (- (overlay-end ol1) margin))) - (goto-char cbeg) - (save-match-data - (if (not (re-search-forward - (overlay-get ol1 'text-clone-syntax) cend t)) - ;; Mark the overlay for deletion. - (setq end cbeg) - (when (< (match-end 0) cend) - ;; Shrink the clone at its end. - (setq end (min end (match-end 0))) - (move-overlay ol1 (overlay-start ol1) - (+ (match-end 0) margin))) - (when (> (match-beginning 0) cbeg) - ;; Shrink the clone at its beginning. - (setq beg (max (match-beginning 0) beg)) - (move-overlay ol1 (- (match-beginning 0) margin) - (overlay-end ol1))))))) - ;; Now go ahead and update the clones. 
- (let ((head (- beg (overlay-start ol1))) - (tail (- (overlay-end ol1) end)) - (str (buffer-substring beg end)) - (nothing-left t) - (text-clone--maintaining t)) - (dolist (ol2 (overlay-get ol1 'text-clones)) - (let ((oe (overlay-end ol2))) - (unless (or (eq ol1 ol2) (null oe)) - (setq nothing-left nil) - (let ((mod-beg (+ (overlay-start ol2) head))) - ;;(overlay-put ol2 'modification-hooks nil) - (goto-char (- (overlay-end ol2) tail)) - (unless (> mod-beg (point)) - (save-excursion (insert str)) - (delete-region mod-beg (point))) - ;;(overlay-put ol2 'modification-hooks '(text-clone--maintain)) - )))) - (if nothing-left (delete-overlay ol1)))))))) - -(defun text-clone-create (start end &optional spreadp syntax) - "Create a text clone of START...END at point. -Text clones are chunks of text that are automatically kept identical: -changes done to one of the clones will be immediately propagated to the other. - -The buffer's content at point is assumed to be already identical to -the one between START and END. -If SYNTAX is provided it's a regexp that describes the possible text of -the clones; the clone will be shrunk or killed if necessary to ensure that -its text matches the regexp. -If SPREADP is non-nil it indicates that text inserted before/after the -clone should be incorporated in the clone." - ;; To deal with SPREADP we can either use an overlay with `nil t' along - ;; with insert-(behind|in-front-of)-hooks or use a slightly larger overlay - ;; (with a one-char margin at each end) with `t nil'. - ;; We opted for a larger overlay because it behaves better in the case - ;; where the clone is reduced to the empty string (we want the overlay to - ;; stay when the clone's content is the empty string and we want to use - ;; `evaporate' to make sure those overlays get deleted when needed). - ;; - (let* ((pt-end (+ (point) (- end start))) - (start-margin (if (or (not spreadp) (bobp) (<= start (point-min))) - 0 1)) - (end-margin (if (or (not spreadp) - (>= pt-end (point-max)) - (>= start (point-max))) - 0 1)) - ;; FIXME: Reuse overlays at point to extend dups! - (ol1 (make-overlay (- start start-margin) (+ end end-margin) nil t)) - (ol2 (make-overlay (- (point) start-margin) (+ pt-end end-margin) nil t)) - (dups (list ol1 ol2))) - (overlay-put ol1 'modification-hooks '(text-clone--maintain)) - (when spreadp (overlay-put ol1 'text-clone-spreadp t)) - (when syntax (overlay-put ol1 'text-clone-syntax syntax)) - ;;(overlay-put ol1 'face 'underline) - (overlay-put ol1 'evaporate t) - (overlay-put ol1 'text-clones dups) - ;; - (overlay-put ol2 'modification-hooks '(text-clone--maintain)) - (when spreadp (overlay-put ol2 'text-clone-spreadp t)) - (when syntax (overlay-put ol2 'text-clone-syntax syntax)) - ;;(overlay-put ol2 'face 'underline) - (overlay-put ol2 'evaporate t) - (overlay-put ol2 'text-clones dups))) - -;;;; Mail user agents. - -;; Here we include just enough for other packages to be able -;; to define them. - -(defun define-mail-user-agent (symbol composefunc sendfunc - &optional abortfunc hookvar) - "Define a symbol to identify a mail-sending package for `mail-user-agent'. - -SYMBOL can be any Lisp symbol. Its function definition and/or -value as a variable do not matter for this usage; we use only certain -properties on its property list, to encode the rest of the arguments. - -COMPOSEFUNC is program callable function that composes an outgoing -mail message buffer. This function should set up the basics of the -buffer without requiring user interaction. 
It should populate the -standard mail headers, leaving the `to:' and `subject:' headers blank -by default. - -COMPOSEFUNC should accept several optional arguments--the same -arguments that `compose-mail' takes. See that function's documentation. - -SENDFUNC is the command a user would run to send the message. - -Optional ABORTFUNC is the command a user would run to abort the -message. For mail packages that don't have a separate abort function, -this can be `kill-buffer' (the equivalent of omitting this argument). - -Optional HOOKVAR is a hook variable that gets run before the message -is actually sent. Callers that use the `mail-user-agent' may -install a hook function temporarily on this hook variable. -If HOOKVAR is nil, `mail-send-hook' is used. - -The properties used on SYMBOL are `composefunc', `sendfunc', -`abortfunc', and `hookvar'." - (put symbol 'composefunc composefunc) - (put symbol 'sendfunc sendfunc) - (put symbol 'abortfunc (or abortfunc 'kill-buffer)) - (put symbol 'hookvar (or hookvar 'mail-send-hook))) - -(defvar called-interactively-p-functions nil - "Special hook called to skip special frames in `called-interactively-p'. -The functions are called with 3 arguments: (I FRAME1 FRAME2), -where FRAME1 is a \"current frame\", FRAME2 is the next frame, -I is the index of the frame after FRAME2. It should return nil -if those frames don't seem special and otherwise, it should return -the number of frames to skip (minus 1).") - -(defconst internal--funcall-interactively - (symbol-function 'funcall-interactively)) - -(defun called-interactively-p (&optional kind) - "Return t if the containing function was called by `call-interactively'. -If KIND is `interactive', then only return t if the call was made -interactively by the user, i.e. not in `noninteractive' mode nor -when `executing-kbd-macro'. -If KIND is `any', on the other hand, it will return t for any kind of -interactive call, including being called as the binding of a key or -from a keyboard macro, even in `noninteractive' mode. - -This function is very brittle, it may fail to return the intended result when -the code is debugged, advised, or instrumented in some form. Some macros and -special forms (such as `condition-case') may also sometimes wrap their bodies -in a `lambda', so any call to `called-interactively-p' from those bodies will -indicate whether that lambda (rather than the surrounding function) was called -interactively. - -Instead of using this function, it is cleaner and more reliable to give your -function an extra optional argument whose `interactive' spec specifies -non-nil unconditionally (\"p\" is a good way to do this), or via -\(not (or executing-kbd-macro noninteractive)). - -The only known proper use of `interactive' for KIND is in deciding -whether to display a helpful message, or how to display it. If you're -thinking of using it for any other purpose, it is quite likely that -you're making a mistake. Think: what do you want to do when the -command is called from a keyboard macro?" - (declare (advertised-calling-convention (kind) "23.1")) - (when (not (and (eq kind 'interactive) - (or executing-kbd-macro noninteractive))) - (let* ((i 1) ;; 0 is the called-interactively-p frame. - frame nextframe - (get-next-frame - (lambda () - (setq frame nextframe) - (setq nextframe (backtrace-frame i 'called-interactively-p)) - ;; (message "Frame %d = %S" i nextframe) - (setq i (1+ i))))) - (funcall get-next-frame) ;; Get the first frame. 
- (while - ;; FIXME: The edebug and advice handling should be made modular and - ;; provided directly by edebug.el and nadvice.el. - (progn - ;; frame =(backtrace-frame i-2) - ;; nextframe=(backtrace-frame i-1) - (funcall get-next-frame) - ;; `pcase' would be a fairly good fit here, but it sometimes moves - ;; branches within local functions, which then messes up the - ;; `backtrace-frame' data we get, - (or - ;; Skip special forms (from non-compiled code). - (and frame (null (car frame))) - ;; Skip also `interactive-p' (because we don't want to know if - ;; interactive-p was called interactively but if it's caller was) - ;; and `byte-code' (idem; this appears in subexpressions of things - ;; like condition-case, which are wrapped in a separate bytecode - ;; chunk). - ;; FIXME: For lexical-binding code, this is much worse, - ;; because the frames look like "byte-code -> funcall -> #[...]", - ;; which is not a reliable signature. - (memq (nth 1 frame) '(interactive-p 'byte-code)) - ;; Skip package-specific stack-frames. - (let ((skip (run-hook-with-args-until-success - 'called-interactively-p-functions - i frame nextframe))) - (pcase skip - (`nil nil) - (`0 t) - (_ (setq i (+ i skip -1)) (funcall get-next-frame))))))) - ;; Now `frame' should be "the function from which we were called". - (pcase (cons frame nextframe) - ;; No subr calls `interactive-p', so we can rule that out. - (`((,_ ,(pred (lambda (f) (subrp (indirect-function f)))) . ,_) . ,_) nil) - ;; In case #<subr funcall-interactively> without going through the - ;; `funcall-interactively' symbol (bug#3984). - (`(,_ . (t ,(pred (lambda (f) - (eq internal--funcall-interactively - (indirect-function f)))) - . ,_)) - t))))) - -(defun interactive-p () - "Return t if the containing function was run directly by user input. -This means that the function was called with `call-interactively' -\(which includes being called as the binding of a key) -and input is currently coming from the keyboard (not a keyboard macro), -and Emacs is not running in batch mode (`noninteractive' is nil). - -The only known proper use of `interactive-p' is in deciding whether to -display a helpful message, or how to display it. If you're thinking -of using it for any other purpose, it is quite likely that you're -making a mistake. Think: what do you want to do when the command is -called from a keyboard macro or in batch mode? - -To test whether your function was called with `call-interactively', -either (i) add an extra optional argument and give it an `interactive' -spec that specifies non-nil unconditionally (such as \"p\"); or (ii) -use `called-interactively-p'." - (declare (obsolete called-interactively-p "23.2")) - (called-interactively-p 'interactive)) - -(defun internal-push-keymap (keymap symbol) - (let ((map (symbol-value symbol))) - (unless (memq keymap map) - (unless (memq 'add-keymap-witness (symbol-value symbol)) - (setq map (make-composed-keymap nil (symbol-value symbol))) - (push 'add-keymap-witness (cdr map)) - (set symbol map)) - (push keymap (cdr map))))) - -(defun internal-pop-keymap (keymap symbol) - (let ((map (symbol-value symbol))) - (when (memq keymap map) - (setf (cdr map) (delq keymap (cdr map)))) - (let ((tail (cddr map))) - (and (or (null tail) (keymapp tail)) - (eq 'add-keymap-witness (nth 1 map)) - (set symbol tail))))) - -(define-obsolete-function-alias - 'set-temporary-overlay-map 'set-transient-map "24.4") - -(defun set-transient-map (map &optional keep-pred on-exit) - "Set MAP as a temporary keymap taking precedence over other keymaps.
-Normally, MAP is used only once, to look up the very next key. -However, if the optional argument KEEP-PRED is t, MAP stays -active if a key from MAP is used. KEEP-PRED can also be a -function of no arguments: it is called from `pre-command-hook' and -if it returns non-nil, then MAP stays active. - -Optional arg ON-EXIT, if non-nil, specifies a function that is -called, with no arguments, after MAP is deactivated. - -This uses `overriding-terminal-local-map' which takes precedence over all other -keymaps. As usual, if no match for a key is found in MAP, the normal key -lookup sequence then continues. - -This returns an \"exit function\", which can be called with no argument -to deactivate this transient map, regardless of KEEP-PRED." - (let* ((clearfun (make-symbol "clear-transient-map")) - (exitfun - (lambda () - (internal-pop-keymap map 'overriding-terminal-local-map) - (remove-hook 'pre-command-hook clearfun) - (when on-exit (funcall on-exit))))) - ;; Don't use letrec, because equal (in add/remove-hook) would get trapped - ;; in a cycle. - (fset clearfun - (lambda () - (with-demoted-errors "set-transient-map PCH: %S" - (unless (cond - ((null keep-pred) nil) - ((not (eq map (cadr overriding-terminal-local-map))) - ;; There's presumably some other transient-map in - ;; effect. Wait for that one to terminate before we - ;; remove ourselves. - ;; For example, if isearch and C-u both use transient - ;; maps, then the lifetime of the C-u should be nested - ;; within isearch's, so the pre-command-hook of - ;; isearch should be suspended during the C-u one so - ;; we don't exit isearch just because we hit 1 after - ;; C-u and that 1 exits isearch whereas it doesn't - ;; exit C-u. - t) - ((eq t keep-pred) - (eq this-command - (lookup-key map (this-command-keys-vector)))) - (t (funcall keep-pred))) - (funcall exitfun))))) - (add-hook 'pre-command-hook clearfun) - (internal-push-keymap map 'overriding-terminal-local-map) - exitfun)) - -;;;; Progress reporters. - -;; Progress reporter has the following structure: -;; -;; (NEXT-UPDATE-VALUE . [NEXT-UPDATE-TIME -;; MIN-VALUE -;; MAX-VALUE -;; MESSAGE -;; MIN-CHANGE -;; MIN-TIME]) -;; -;; This weirdness is for optimization reasons: we want -;; `progress-reporter-update' to be as fast as possible, so -;; `(car reporter)' is better than `(aref reporter 0)'. -;; -;; NEXT-UPDATE-TIME is a float. While `float-time' loses a couple -;; digits of precision, it doesn't really matter here. On the other -;; hand, it greatly simplifies the code. - -(defsubst progress-reporter-update (reporter &optional value) - "Report progress of an operation in the echo area. -REPORTER should be the result of a call to `make-progress-reporter'. - -If REPORTER is a numerical progress reporter---i.e. if it was - made using non-nil MIN-VALUE and MAX-VALUE arguments to - `make-progress-reporter'---then VALUE should be a number between - MIN-VALUE and MAX-VALUE. - -If REPORTER is a non-numerical reporter, VALUE should be nil. - -This function is relatively inexpensive. If the change since -last update is too small or insufficient time has passed, it does -nothing." - (when (or (not (numberp value)) ; For pulsing reporter - (>= value (car reporter))) ; For numerical reporter - (progress-reporter-do-update reporter value))) - -(defun make-progress-reporter (message &optional min-value max-value - current-value min-change min-time) - "Return progress reporter object for use with `progress-reporter-update'. 
- -MESSAGE is shown in the echo area, with a status indicator -appended to the end. When you call `progress-reporter-done', the -word \"done\" is printed after the MESSAGE. You can change the -MESSAGE of an existing progress reporter by calling -`progress-reporter-force-update'. - -MIN-VALUE and MAX-VALUE, if non-nil, are starting (0% complete) -and final (100% complete) states of operation; the latter should -be larger. In this case, the status message shows the percentage -progress. - -If MIN-VALUE and/or MAX-VALUE is omitted or nil, the status -message shows a \"spinning\", non-numeric indicator. - -Optional CURRENT-VALUE is the initial progress; the default is -MIN-VALUE. -Optional MIN-CHANGE is the minimal change in percents to report; -the default is 1%. -CURRENT-VALUE and MIN-CHANGE do not have any effect if MIN-VALUE -and/or MAX-VALUE are nil. - -Optional MIN-TIME specifies the minimum interval time between -echo area updates (default is 0.2 seconds.) If the function -`float-time' is not present, time is not tracked at all. If the -OS is not capable of measuring fractions of seconds, this -parameter is effectively rounded up." - (when (string-match "[[:alnum:]]\\'" message) - (setq message (concat message "..."))) - (unless min-time - (setq min-time 0.2)) - (let ((reporter - ;; Force a call to `message' now - (cons (or min-value 0) - (vector (if (and (fboundp 'float-time) - (>= min-time 0.02)) - (float-time) nil) - min-value - max-value - message - (if min-change (max (min min-change 50) 1) 1) - min-time)))) - (progress-reporter-update reporter (or current-value min-value)) - reporter)) - -(defun progress-reporter-force-update (reporter &optional value new-message) - "Report progress of an operation in the echo area unconditionally. - -The first two arguments are the same as in `progress-reporter-update'. -NEW-MESSAGE, if non-nil, sets a new message for the reporter." - (let ((parameters (cdr reporter))) - (when new-message - (aset parameters 3 new-message)) - (when (aref parameters 0) - (aset parameters 0 (float-time))) - (progress-reporter-do-update reporter value))) - -(defvar progress-reporter--pulse-characters ["-" "\\" "|" "/"] - "Characters to use for pulsing progress reporters.") - -(defun progress-reporter-do-update (reporter value) - (let* ((parameters (cdr reporter)) - (update-time (aref parameters 0)) - (min-value (aref parameters 1)) - (max-value (aref parameters 2)) - (text (aref parameters 3)) - (enough-time-passed - ;; See if enough time has passed since the last update. - (or (not update-time) - (when (>= (float-time) update-time) - ;; Calculate time for the next update - (aset parameters 0 (+ update-time (aref parameters 5))))))) - (cond ((and min-value max-value) - ;; Numerical indicator - (let* ((one-percent (/ (- max-value min-value) 100.0)) - (percentage (if (= max-value min-value) - 0 - (truncate (/ (- value min-value) - one-percent))))) - ;; Calculate NEXT-UPDATE-VALUE. If we are not printing - ;; message because not enough time has passed, use 1 - ;; instead of MIN-CHANGE. This makes delays between echo - ;; area updates closer to MIN-TIME. 
- (setcar reporter - (min (+ min-value (* (+ percentage - (if enough-time-passed - ;; MIN-CHANGE - (aref parameters 4) - 1)) - one-percent)) - max-value)) - (when (integerp value) - (setcar reporter (ceiling (car reporter)))) - ;; Only print message if enough time has passed - (when enough-time-passed - (if (> percentage 0) - (message "%s%d%%" text percentage) - (message "%s" text))))) - ;; Pulsing indicator - (enough-time-passed - (let ((index (mod (1+ (car reporter)) 4)) - (message-log-max nil)) - (setcar reporter index) - (message "%s %s" - text - (aref progress-reporter--pulse-characters - index))))))) - -(defun progress-reporter-done (reporter) - "Print reporter's message followed by word \"done\" in echo area." - (message "%sdone" (aref (cdr reporter) 3))) - -(defmacro dotimes-with-progress-reporter (spec message &rest body) - "Loop a certain number of times and report progress in the echo area. -Evaluate BODY with VAR bound to successive integers running from -0, inclusive, to COUNT, exclusive. Then evaluate RESULT to get -the return value (nil if RESULT is omitted). - -At each iteration MESSAGE followed by progress percentage is -printed in the echo area. After the loop is finished, MESSAGE -followed by word \"done\" is printed. This macro is a -convenience wrapper around `make-progress-reporter' and friends. - -\(fn (VAR COUNT [RESULT]) MESSAGE BODY...)" - (declare (indent 2) (debug ((symbolp form &optional form) form body))) - (let ((temp (make-symbol "--dotimes-temp--")) - (temp2 (make-symbol "--dotimes-temp2--")) - (start 0) - (end (nth 1 spec))) - `(let ((,temp ,end) - (,(car spec) ,start) - (,temp2 (make-progress-reporter ,message ,start ,end))) - (while (< ,(car spec) ,temp) - ,@body - (progress-reporter-update ,temp2 - (setq ,(car spec) (1+ ,(car spec))))) - (progress-reporter-done ,temp2) - nil ,@(cdr (cdr spec))))) - - -;;;; Comparing version strings. - -(defconst version-separator "." - "Specify the string used to separate the version elements. - -Usually the separator is \".\", but it can be any other string.") - - -(defconst version-regexp-alist - '(("^[-_+ ]?snapshot$" . -4) - ;; treat "1.2.3-20050920" and "1.2-3" as snapshot releases - ("^[-_+]$" . -4) - ;; treat "1.2.3-CVS" as snapshot release - ("^[-_+ ]?\\(cvs\\|git\\|bzr\\|svn\\|hg\\|darcs\\)$" . -4) - ("^[-_+ ]?alpha$" . -3) - ("^[-_+ ]?beta$" . -2) - ("^[-_+ ]?\\(pre\\|rc\\)$" . -1)) - "Specify association between non-numeric version and its priority. - -This association is used to handle version string like \"1.0pre2\", -\"0.9alpha1\", etc. It's used by `version-to-list' (which see) to convert the -non-numeric part of a version string to an integer. For example: - - String Version Integer List Version - \"0.9snapshot\" (0 9 -4) - \"1.0-git\" (1 0 -4) - \"1.0pre2\" (1 0 -1 2) - \"1.0PRE2\" (1 0 -1 2) - \"22.8beta3\" (22 8 -2 3) - \"22.8 Beta3\" (22 8 -2 3) - \"0.9alpha1\" (0 9 -3 1) - \"0.9AlphA1\" (0 9 -3 1) - \"0.9 alpha\" (0 9 -3) - -Each element has the following form: - - (REGEXP . PRIORITY) - -Where: - -REGEXP regexp used to match non-numeric part of a version string. - It should begin with the `^' anchor and end with a `$' to - prevent false hits. Letter-case is ignored while matching - REGEXP. - -PRIORITY a negative integer specifying non-numeric priority of REGEXP.") - - -(defun version-to-list (ver) - "Convert version string VER into a list of integers. - -The version syntax is given by the following EBNF: - - VERSION ::= NUMBER ( SEPARATOR NUMBER )*. - - NUMBER ::= (0|1|2|3|4|5|6|7|8|9)+. 
- - SEPARATOR ::= `version-separator' (which see) - | `version-regexp-alist' (which see). - -The NUMBER part is optional if SEPARATOR is a match for an element -in `version-regexp-alist'. - -Examples of valid version syntax: - - 1.0pre2 1.0.7.5 22.8beta3 0.9alpha1 6.9.30Beta - -Examples of invalid version syntax: - - 1.0prepre2 1.0..7.5 22.8X3 alpha3.2 .5 - -Examples of version conversion: - - Version String Version as a List of Integers - \"1.0.7.5\" (1 0 7 5) - \"1.0pre2\" (1 0 -1 2) - \"1.0PRE2\" (1 0 -1 2) - \"22.8beta3\" (22 8 -2 3) - \"22.8Beta3\" (22 8 -2 3) - \"0.9alpha1\" (0 9 -3 1) - \"0.9AlphA1\" (0 9 -3 1) - \"0.9alpha\" (0 9 -3) - \"0.9snapshot\" (0 9 -4) - \"1.0-git\" (1 0 -4) - -See documentation for `version-separator' and `version-regexp-alist'." - (or (and (stringp ver) (> (length ver) 0)) - (error "Invalid version string: '%s'" ver)) - ;; Change .x.y to 0.x.y - (if (and (>= (length ver) (length version-separator)) - (string-equal (substring ver 0 (length version-separator)) - version-separator)) - (setq ver (concat "0" ver))) - (save-match-data - (let ((i 0) - (case-fold-search t) ; ignore case in matching - lst s al) - (while (and (setq s (string-match "[0-9]+" ver i)) - (= s i)) - ;; handle numeric part - (setq lst (cons (string-to-number (substring ver i (match-end 0))) - lst) - i (match-end 0)) - ;; handle non-numeric part - (when (and (setq s (string-match "[^0-9]+" ver i)) - (= s i)) - (setq s (substring ver i (match-end 0)) - i (match-end 0)) - ;; handle alpha, beta, pre, etc. separator - (unless (string= s version-separator) - (setq al version-regexp-alist) - (while (and al (not (string-match (caar al) s))) - (setq al (cdr al))) - (cond (al - (push (cdar al) lst)) - ;; Convert 22.3a to 22.3.1, 22.3b to 22.3.2, etc. - ((string-match "^[-_+ ]?\\([a-zA-Z]\\)$" s) - (push (- (aref (downcase (match-string 1 s)) 0) ?a -1) - lst)) - (t (error "Invalid version syntax: '%s'" ver)))))) - (if (null lst) - (error "Invalid version syntax: '%s'" ver) - (nreverse lst))))) - - -(defun version-list-< (l1 l2) - "Return t if L1, a list specification of a version, is lower than L2. - -Note that a version specified by the list (1) is equal to (1 0), -\(1 0 0), (1 0 0 0), etc. That is, the trailing zeros are insignificant. -Also, a version given by the list (1) is higher than (1 -1), which in -turn is higher than (1 -2), which is higher than (1 -3)." - (while (and l1 l2 (= (car l1) (car l2))) - (setq l1 (cdr l1) - l2 (cdr l2))) - (cond - ;; l1 not null and l2 not null - ((and l1 l2) (< (car l1) (car l2))) - ;; l1 null and l2 null ==> l1 length = l2 length - ((and (null l1) (null l2)) nil) - ;; l1 not null and l2 null ==> l1 length > l2 length - (l1 (< (version-list-not-zero l1) 0)) - ;; l1 null and l2 not null ==> l2 length > l1 length - (t (< 0 (version-list-not-zero l2))))) - - -(defun version-list-= (l1 l2) - "Return t if L1, a list specification of a version, is equal to L2. - -Note that a version specified by the list (1) is equal to (1 0), -\(1 0 0), (1 0 0 0), etc. That is, the trailing zeros are insignificant. -Also, a version given by the list (1) is higher than (1 -1), which in -turn is higher than (1 -2), which is higher than (1 -3)." 
- (while (and l1 l2 (= (car l1) (car l2))) - (setq l1 (cdr l1) - l2 (cdr l2))) - (cond - ;; l1 not null and l2 not null - ((and l1 l2) nil) - ;; l1 null and l2 null ==> l1 length = l2 length - ((and (null l1) (null l2))) - ;; l1 not null and l2 null ==> l1 length > l2 length - (l1 (zerop (version-list-not-zero l1))) - ;; l1 null and l2 not null ==> l2 length > l1 length - (t (zerop (version-list-not-zero l2))))) - - -(defun version-list-<= (l1 l2) - "Return t if L1, a list specification of a version, is lower or equal to L2. - -Note that integer list (1) is equal to (1 0), (1 0 0), (1 0 0 0), -etc. That is, the trailing zeroes are insignificant. Also, integer -list (1) is greater than (1 -1) which is greater than (1 -2) -which is greater than (1 -3)." - (while (and l1 l2 (= (car l1) (car l2))) - (setq l1 (cdr l1) - l2 (cdr l2))) - (cond - ;; l1 not null and l2 not null - ((and l1 l2) (< (car l1) (car l2))) - ;; l1 null and l2 null ==> l1 length = l2 length - ((and (null l1) (null l2))) - ;; l1 not null and l2 null ==> l1 length > l2 length - (l1 (<= (version-list-not-zero l1) 0)) - ;; l1 null and l2 not null ==> l2 length > l1 length - (t (<= 0 (version-list-not-zero l2))))) - -(defun version-list-not-zero (lst) - "Return the first non-zero element of LST, which is a list of integers. - -If all LST elements are zeros or LST is nil, return zero." - (while (and lst (zerop (car lst))) - (setq lst (cdr lst))) - (if lst - (car lst) - ;; there is no element different of zero - 0)) - - -(defun version< (v1 v2) - "Return t if version V1 is lower (older) than V2. - -Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\", -etc. That is, the trailing \".0\"s are insignificant. Also, version -string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\", -which is higher than \"1alpha\", which is higher than \"1snapshot\". -Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions." - (version-list-< (version-to-list v1) (version-to-list v2))) - -(defun version<= (v1 v2) - "Return t if version V1 is lower (older) than or equal to V2. - -Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\", -etc. That is, the trailing \".0\"s are insignificant. Also, version -string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\", -which is higher than \"1alpha\", which is higher than \"1snapshot\". -Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions." - (version-list-<= (version-to-list v1) (version-to-list v2))) - -(defun version= (v1 v2) - "Return t if version V1 is equal to V2. - -Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\", -etc. That is, the trailing \".0\"s are insignificant. Also, version -string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\", -which is higher than \"1alpha\", which is higher than \"1snapshot\". -Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions." - (version-list-= (version-to-list v1) (version-to-list v2))) - -(defvar package--builtin-versions - ;; Mostly populated by loaddefs.el via autoload-builtin-package-versions. - (purecopy `((emacs . ,(version-to-list emacs-version)))) - "Alist giving the version of each versioned builtin package. -I.e. each element of the list is of the form (NAME . 
VERSION) where -NAME is the package name as a symbol, and VERSION is its version -as a list.") - -(defun package--description-file (dir) - (concat (let ((subdir (file-name-nondirectory - (directory-file-name dir)))) - (if (string-match "\\([^.].*?\\)-\\([0-9]+\\(?:[.][0-9]+\\|\\(?:pre\\|beta\\|alpha\\)[0-9]+\\)*\\)" subdir) - (match-string 1 subdir) subdir)) - "-pkg.el")) - - -;;; Misc. -(defconst menu-bar-separator '("--") - "Separator for menus.") - -;; The following statement ought to be in print.c, but `provide' can't -;; be used there. -;; http://lists.gnu.org/archive/html/emacs-devel/2009-08/msg00236.html -(when (hash-table-p (car (read-from-string - (prin1-to-string (make-hash-table))))) - (provide 'hashtable-print-readable)) - -;; This is used in lisp/Makefile.in and in leim/Makefile.in to -;; generate file names for autoloads, custom-deps, and finder-data. -(defun unmsys--file-name (file) - "Produce the canonical file name for FILE from its MSYS form. - -On systems other than MS-Windows, just returns FILE. -On MS-Windows, converts /d/foo/bar form of file names -passed by MSYS Make into d:/foo/bar that Emacs can grok. - -This function is called from lisp/Makefile and leim/Makefile." - (when (and (eq system-type 'windows-nt) - (string-match "\\`/[a-zA-Z]/" file)) - (setq file (concat (substring file 1 2) ":" (substring file 2)))) - file) - - -;;; subr.el ends here diff --git a/tests/examplefiles/swig_java.swg b/tests/examplefiles/swig_java.swg deleted file mode 100644 index 6126a55e..00000000 --- a/tests/examplefiles/swig_java.swg +++ /dev/null @@ -1,1329 +0,0 @@ -/* ----------------------------------------------------------------------------- - * java.swg - * - * Java typemaps - * ----------------------------------------------------------------------------- */ - -%include - -/* The jni, jtype and jstype typemaps work together and so there should be one of each. - * The jni typemap contains the JNI type used in the JNI (C/C++) code. - * The jtype typemap contains the Java type used in the JNI intermediary class. - * The jstype typemap contains the Java type used in the Java proxy classes, type wrapper classes and module class. 
*/ - -/* Fragments */ -%fragment("SWIG_PackData", "header") { -/* Pack binary data into a string */ -SWIGINTERN char * SWIG_PackData(char *c, void *ptr, size_t sz) { - static const char hex[17] = "0123456789abcdef"; - register const unsigned char *u = (unsigned char *) ptr; - register const unsigned char *eu = u + sz; - for (; u != eu; ++u) { - register unsigned char uu = *u; - *(c++) = hex[(uu & 0xf0) >> 4]; - *(c++) = hex[uu & 0xf]; - } - return c; -} -} - -%fragment("SWIG_UnPackData", "header") { -/* Unpack binary data from a string */ -SWIGINTERN const char * SWIG_UnpackData(const char *c, void *ptr, size_t sz) { - register unsigned char *u = (unsigned char *) ptr; - register const unsigned char *eu = u + sz; - for (; u != eu; ++u) { - register char d = *(c++); - register unsigned char uu; - if ((d >= '0') && (d <= '9')) - uu = ((d - '0') << 4); - else if ((d >= 'a') && (d <= 'f')) - uu = ((d - ('a'-10)) << 4); - else - return (char *) 0; - d = *(c++); - if ((d >= '0') && (d <= '9')) - uu |= (d - '0'); - else if ((d >= 'a') && (d <= 'f')) - uu |= (d - ('a'-10)); - else - return (char *) 0; - *u = uu; - } - return c; -} -} - -/* Primitive types */ -%typemap(jni) bool, const bool & "jboolean" -%typemap(jni) char, const char & "jchar" -%typemap(jni) signed char, const signed char & "jbyte" -%typemap(jni) unsigned char, const unsigned char & "jshort" -%typemap(jni) short, const short & "jshort" -%typemap(jni) unsigned short, const unsigned short & "jint" -%typemap(jni) int, const int & "jint" -%typemap(jni) unsigned int, const unsigned int & "jlong" -%typemap(jni) long, const long & "jint" -%typemap(jni) unsigned long, const unsigned long & "jlong" -%typemap(jni) long long, const long long & "jlong" -%typemap(jni) unsigned long long, const unsigned long long & "jobject" -%typemap(jni) float, const float & "jfloat" -%typemap(jni) double, const double & "jdouble" -%typemap(jni) void "void" - -%typemap(jtype) bool, const bool & "boolean" -%typemap(jtype) char, const char & "char" -%typemap(jtype) signed char, const signed char & "byte" -%typemap(jtype) unsigned char, const unsigned char & "short" -%typemap(jtype) short, const short & "short" -%typemap(jtype) unsigned short, const unsigned short & "int" -%typemap(jtype) int, const int & "int" -%typemap(jtype) unsigned int, const unsigned int & "long" -%typemap(jtype) long, const long & "int" -%typemap(jtype) unsigned long, const unsigned long & "long" -%typemap(jtype) long long, const long long & "long" -%typemap(jtype) unsigned long long, const unsigned long long & "java.math.BigInteger" -%typemap(jtype) float, const float & "float" -%typemap(jtype) double, const double & "double" -%typemap(jtype) void "void" - -%typemap(jstype) bool, const bool & "boolean" -%typemap(jstype) char, const char & "char" -%typemap(jstype) signed char, const signed char & "byte" -%typemap(jstype) unsigned char, const unsigned char & "short" -%typemap(jstype) short, const short & "short" -%typemap(jstype) unsigned short, const unsigned short & "int" -%typemap(jstype) int, const int & "int" -%typemap(jstype) unsigned int, const unsigned int & "long" -%typemap(jstype) long, const long & "int" -%typemap(jstype) unsigned long, const unsigned long & "long" -%typemap(jstype) long long, const long long & "long" -%typemap(jstype) unsigned long long, const unsigned long long & "java.math.BigInteger" -%typemap(jstype) float, const float & "float" -%typemap(jstype) double, const double & "double" -%typemap(jstype) void "void" - -%typemap(jni) char *, char *&, char[ANY], 
char[] "jstring" -%typemap(jtype) char *, char *&, char[ANY], char[] "String" -%typemap(jstype) char *, char *&, char[ANY], char[] "String" - -/* JNI types */ -%typemap(jni) jboolean "jboolean" -%typemap(jni) jchar "jchar" -%typemap(jni) jbyte "jbyte" -%typemap(jni) jshort "jshort" -%typemap(jni) jint "jint" -%typemap(jni) jlong "jlong" -%typemap(jni) jfloat "jfloat" -%typemap(jni) jdouble "jdouble" -%typemap(jni) jstring "jstring" -%typemap(jni) jobject "jobject" -%typemap(jni) jbooleanArray "jbooleanArray" -%typemap(jni) jcharArray "jcharArray" -%typemap(jni) jbyteArray "jbyteArray" -%typemap(jni) jshortArray "jshortArray" -%typemap(jni) jintArray "jintArray" -%typemap(jni) jlongArray "jlongArray" -%typemap(jni) jfloatArray "jfloatArray" -%typemap(jni) jdoubleArray "jdoubleArray" -%typemap(jni) jobjectArray "jobjectArray" - -%typemap(jtype) jboolean "boolean" -%typemap(jtype) jchar "char" -%typemap(jtype) jbyte "byte" -%typemap(jtype) jshort "short" -%typemap(jtype) jint "int" -%typemap(jtype) jlong "long" -%typemap(jtype) jfloat "float" -%typemap(jtype) jdouble "double" -%typemap(jtype) jstring "String" -%typemap(jtype) jobject "Object" -%typemap(jtype) jbooleanArray "boolean[]" -%typemap(jtype) jcharArray "char[]" -%typemap(jtype) jbyteArray "byte[]" -%typemap(jtype) jshortArray "short[]" -%typemap(jtype) jintArray "int[]" -%typemap(jtype) jlongArray "long[]" -%typemap(jtype) jfloatArray "float[]" -%typemap(jtype) jdoubleArray "double[]" -%typemap(jtype) jobjectArray "Object[]" - -%typemap(jstype) jboolean "boolean" -%typemap(jstype) jchar "char" -%typemap(jstype) jbyte "byte" -%typemap(jstype) jshort "short" -%typemap(jstype) jint "int" -%typemap(jstype) jlong "long" -%typemap(jstype) jfloat "float" -%typemap(jstype) jdouble "double" -%typemap(jstype) jstring "String" -%typemap(jstype) jobject "Object" -%typemap(jstype) jbooleanArray "boolean[]" -%typemap(jstype) jcharArray "char[]" -%typemap(jstype) jbyteArray "byte[]" -%typemap(jstype) jshortArray "short[]" -%typemap(jstype) jintArray "int[]" -%typemap(jstype) jlongArray "long[]" -%typemap(jstype) jfloatArray "float[]" -%typemap(jstype) jdoubleArray "double[]" -%typemap(jstype) jobjectArray "Object[]" - -/* Non primitive types */ -%typemap(jni) SWIGTYPE "jlong" -%typemap(jtype) SWIGTYPE "long" -%typemap(jstype) SWIGTYPE "$&javaclassname" - -%typemap(jni) SWIGTYPE [] "jlong" -%typemap(jtype) SWIGTYPE [] "long" -%typemap(jstype) SWIGTYPE [] "$javaclassname" - -%typemap(jni) SWIGTYPE * "jlong" -%typemap(jtype) SWIGTYPE * "long" -%typemap(jstype) SWIGTYPE * "$javaclassname" - -%typemap(jni) SWIGTYPE & "jlong" -%typemap(jtype) SWIGTYPE & "long" -%typemap(jstype) SWIGTYPE & "$javaclassname" - -/* pointer to a class member */ -%typemap(jni) SWIGTYPE (CLASS::*) "jstring" -%typemap(jtype) SWIGTYPE (CLASS::*) "String" -%typemap(jstype) SWIGTYPE (CLASS::*) "$javaclassname" - -/* The following are the in, out, freearg, argout typemaps. These are the JNI code generating typemaps for converting from Java to C and visa versa. */ - -/* primitive types */ -%typemap(in) bool -%{ $1 = $input ? true : false; %} - -%typemap(directorout) bool -%{ $result = $input ? 
true : false; %} - -%typemap(javadirectorin) bool "$jniinput" -%typemap(javadirectorout) bool "$javacall" - -%typemap(in) char, - signed char, - unsigned char, - short, - unsigned short, - int, - unsigned int, - long, - unsigned long, - long long, - float, - double -%{ $1 = ($1_ltype)$input; %} - -%typemap(directorout) char, - signed char, - unsigned char, - short, - unsigned short, - int, - unsigned int, - long, - unsigned long, - long long, - float, - double -%{ $result = ($1_ltype)$input; %} - -%typemap(directorin, descriptor="Z") bool "$input = (jboolean) $1;" -%typemap(directorin, descriptor="C") char "$input = (jint) $1;" -%typemap(directorin, descriptor="B") signed char "$input = (jbyte) $1;" -%typemap(directorin, descriptor="S") unsigned char "$input = (jshort) $1;" -%typemap(directorin, descriptor="S") short "$input = (jshort) $1;" -%typemap(directorin, descriptor="I") unsigned short "$input = (jint) $1;" -%typemap(directorin, descriptor="I") int "$input = (jint) $1;" -%typemap(directorin, descriptor="J") unsigned int "$input = (jlong) $1;" -%typemap(directorin, descriptor="I") long "$input = (jint) $1;" -%typemap(directorin, descriptor="J") unsigned long "$input = (jlong) $1;" -%typemap(directorin, descriptor="J") long long "$input = (jlong) $1;" -%typemap(directorin, descriptor="F") float "$input = (jfloat) $1;" -%typemap(directorin, descriptor="D") double "$input = (jdouble) $1;" - -%typemap(javadirectorin) char, - signed char, - unsigned char, - short, - unsigned short, - int, - unsigned int, - long, - unsigned long, - long long, - float, - double - "$jniinput" - -%typemap(javadirectorout) char, - signed char, - unsigned char, - short, - unsigned short, - int, - unsigned int, - long, - unsigned long, - long long, - float, - double - "$javacall" - -%typemap(out) bool %{ $result = (jboolean)$1; %} -%typemap(out) char %{ $result = (jchar)$1; %} -%typemap(out) signed char %{ $result = (jbyte)$1; %} -%typemap(out) unsigned char %{ $result = (jshort)$1; %} -%typemap(out) short %{ $result = (jshort)$1; %} -%typemap(out) unsigned short %{ $result = (jint)$1; %} -%typemap(out) int %{ $result = (jint)$1; %} -%typemap(out) unsigned int %{ $result = (jlong)$1; %} -%typemap(out) long %{ $result = (jint)$1; %} -%typemap(out) unsigned long %{ $result = (jlong)$1; %} -%typemap(out) long long %{ $result = (jlong)$1; %} -%typemap(out) float %{ $result = (jfloat)$1; %} -%typemap(out) double %{ $result = (jdouble)$1; %} - -/* unsigned long long */ -/* Convert from BigInteger using the toByteArray member function */ -%typemap(in) unsigned long long { - jclass clazz; - jmethodID mid; - jbyteArray ba; - jbyte* bae; - jsize sz; - int i; - - if (!$input) { - SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "BigInteger null"); - return $null; - } - clazz = JCALL1(GetObjectClass, jenv, $input); - mid = JCALL3(GetMethodID, jenv, clazz, "toByteArray", "()[B"); - ba = (jbyteArray)JCALL2(CallObjectMethod, jenv, $input, mid); - bae = JCALL2(GetByteArrayElements, jenv, ba, 0); - sz = JCALL1(GetArrayLength, jenv, ba); - $1 = 0; - for(i=0; i", "([B)V"); - jobject bigint; - int i; - - bae[0] = 0; - for(i=1; i<9; i++ ) { - bae[i] = (jbyte)($1>>8*(8-i)); - } - - JCALL3(ReleaseByteArrayElements, jenv, ba, bae, 0); - bigint = JCALL3(NewObject, jenv, clazz, mid, ba); - $result = bigint; -} - -/* Convert to BigInteger (see out typemap) */ -%typemap(directorin, descriptor="Ljava/math/BigInteger;") unsigned long long, const unsigned long long & { - jbyteArray ba = JCALL1(NewByteArray, jenv, 9); - jbyte* bae 
= JCALL2(GetByteArrayElements, jenv, ba, 0); - jclass clazz = JCALL1(FindClass, jenv, "java/math/BigInteger"); - jmethodID mid = JCALL3(GetMethodID, jenv, clazz, "<init>", "([B)V"); - jobject bigint; - int swig_i; - - bae[0] = 0; - for(swig_i=1; swig_i<9; swig_i++ ) { - bae[swig_i] = (jbyte)($1>>8*(8-swig_i)); - } - - JCALL3(ReleaseByteArrayElements, jenv, ba, bae, 0); - bigint = JCALL3(NewObject, jenv, clazz, mid, ba); - $input = bigint; -} - -%typemap(javadirectorin) unsigned long long "$jniinput" -%typemap(javadirectorout) unsigned long long "$javacall" - -/* char * - treat as String */ -%typemap(in, noblock=1) char * { - $1 = 0; - if ($input) { - $1 = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); - if (!$1) return $null; - } -} - -%typemap(directorout, noblock=1, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) char * { - $1 = 0; - if ($input) { - $result = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); - if (!$result) return $null; - } -} - -%typemap(directorin, descriptor="Ljava/lang/String;", noblock=1) char * { - $input = 0; - if ($1) { - $input = JCALL1(NewStringUTF, jenv, (const char *)$1); - if (!$input) return $null; - } -} - -%typemap(freearg, noblock=1) char * { if ($1) JCALL2(ReleaseStringUTFChars, jenv, $input, (const char *)$1); } -%typemap(out, noblock=1) char * { if ($1) $result = JCALL1(NewStringUTF, jenv, (const char *)$1); } -%typemap(javadirectorin) char * "$jniinput" -%typemap(javadirectorout) char * "$javacall" - -/* char *& - treat as String */ -%typemap(in, noblock=1) char *& ($*1_ltype temp = 0) { - $1 = 0; - if ($input) { - temp = ($*1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); - if (!temp) return $null; - } - $1 = &temp; -} -%typemap(freearg, noblock=1) char *& { if ($1 && *$1) JCALL2(ReleaseStringUTFChars, jenv, $input, (const char *)*$1); } -%typemap(out, noblock=1) char *& { if (*$1) $result = JCALL1(NewStringUTF, jenv, (const char *)*$1); } - -%typemap(out) void "" -%typemap(javadirectorin) void "$jniinput" -%typemap(javadirectorout) void "$javacall" -%typemap(directorin, descriptor="V") void "" - -/* primitive types by reference */ -%typemap(in) const bool & ($*1_ltype temp) -%{ temp = $input ? true : false; - $1 = &temp; %} - -%typemap(directorout,warning=SWIGWARN_TYPEMAP_THREAD_UNSAFE_MSG) const bool & -%{ static $*1_ltype temp; - temp = $input ?
true : false; - $result = &temp; %} - -%typemap(javadirectorin) const bool & "$jniinput" -%typemap(javadirectorout) const bool & "$javacall" - -%typemap(in) const char & ($*1_ltype temp), - const signed char & ($*1_ltype temp), - const unsigned char & ($*1_ltype temp), - const short & ($*1_ltype temp), - const unsigned short & ($*1_ltype temp), - const int & ($*1_ltype temp), - const unsigned int & ($*1_ltype temp), - const long & ($*1_ltype temp), - const unsigned long & ($*1_ltype temp), - const long long & ($*1_ltype temp), - const float & ($*1_ltype temp), - const double & ($*1_ltype temp) -%{ temp = ($*1_ltype)$input; - $1 = &temp; %} - -%typemap(directorout,warning=SWIGWARN_TYPEMAP_THREAD_UNSAFE_MSG) const char &, - const signed char &, - const unsigned char &, - const short &, - const unsigned short &, - const int &, - const unsigned int &, - const long &, - const unsigned long &, - const long long &, - const float &, - const double & -%{ static $*1_ltype temp; - temp = ($*1_ltype)$input; - $result = &temp; %} - -%typemap(directorin, descriptor="Z") const bool & "$input = (jboolean)$1;" -%typemap(directorin, descriptor="C") const char & "$input = (jchar)$1;" -%typemap(directorin, descriptor="B") const signed char & "$input = (jbyte)$1;" -%typemap(directorin, descriptor="S") const unsigned char & "$input = (jshort)$1;" -%typemap(directorin, descriptor="S") const short & "$input = (jshort)$1;" -%typemap(directorin, descriptor="I") const unsigned short & "$input = (jint)$1;" -%typemap(directorin, descriptor="I") const int & "$input = (jint)$1;" -%typemap(directorin, descriptor="J") const unsigned int & "$input = (jlong)$1;" -%typemap(directorin, descriptor="I") const long & "$input = (jint)$1;" -%typemap(directorin, descriptor="J") const unsigned long & "$input = (jlong)$1;" -%typemap(directorin, descriptor="J") const long long & "$input = (jlong)$1;" -%typemap(directorin, descriptor="F") const float & "$input = (jfloat)$1;" -%typemap(directorin, descriptor="D") const double & "$input = (jdouble)$1;" - -%typemap(javadirectorin) const char & ($*1_ltype temp), - const signed char & ($*1_ltype temp), - const unsigned char & ($*1_ltype temp), - const short & ($*1_ltype temp), - const unsigned short & ($*1_ltype temp), - const int & ($*1_ltype temp), - const unsigned int & ($*1_ltype temp), - const long & ($*1_ltype temp), - const unsigned long & ($*1_ltype temp), - const long long & ($*1_ltype temp), - const float & ($*1_ltype temp), - const double & ($*1_ltype temp) - "$jniinput" - -%typemap(javadirectorout) const char & ($*1_ltype temp), - const signed char & ($*1_ltype temp), - const unsigned char & ($*1_ltype temp), - const short & ($*1_ltype temp), - const unsigned short & ($*1_ltype temp), - const int & ($*1_ltype temp), - const unsigned int & ($*1_ltype temp), - const long & ($*1_ltype temp), - const unsigned long & ($*1_ltype temp), - const long long & ($*1_ltype temp), - const float & ($*1_ltype temp), - const double & ($*1_ltype temp) - "$javacall" - - -%typemap(out) const bool & %{ $result = (jboolean)*$1; %} -%typemap(out) const char & %{ $result = (jchar)*$1; %} -%typemap(out) const signed char & %{ $result = (jbyte)*$1; %} -%typemap(out) const unsigned char & %{ $result = (jshort)*$1; %} -%typemap(out) const short & %{ $result = (jshort)*$1; %} -%typemap(out) const unsigned short & %{ $result = (jint)*$1; %} -%typemap(out) const int & %{ $result = (jint)*$1; %} -%typemap(out) const unsigned int & %{ $result = (jlong)*$1; %} -%typemap(out) const long & %{ $result = (jint)*$1; 
%} -%typemap(out) const unsigned long & %{ $result = (jlong)*$1; %} -%typemap(out) const long long & %{ $result = (jlong)*$1; %} -%typemap(out) const float & %{ $result = (jfloat)*$1; %} -%typemap(out) const double & %{ $result = (jdouble)*$1; %} - -/* const unsigned long long & */ -/* Similar to unsigned long long */ -%typemap(in) const unsigned long long & ($*1_ltype temp) { - jclass clazz; - jmethodID mid; - jbyteArray ba; - jbyte* bae; - jsize sz; - int i; - - if (!$input) { - SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "BigInteger null"); - return $null; - } - clazz = JCALL1(GetObjectClass, jenv, $input); - mid = JCALL3(GetMethodID, jenv, clazz, "toByteArray", "()[B"); - ba = (jbyteArray)JCALL2(CallObjectMethod, jenv, $input, mid); - bae = JCALL2(GetByteArrayElements, jenv, ba, 0); - sz = JCALL1(GetArrayLength, jenv, ba); - $1 = &temp; - temp = 0; - for(i=0; i", "([B)V"); - jobject bigint; - int i; - - bae[0] = 0; - for(i=1; i<9; i++ ) { - bae[i] = (jbyte)(*$1>>8*(8-i)); - } - - JCALL3(ReleaseByteArrayElements, jenv, ba, bae, 0); - bigint = JCALL3(NewObject, jenv, clazz, mid, ba); - $result = bigint; -} - -%typemap(javadirectorin) const unsigned long long & "$jniinput" -%typemap(javadirectorout) const unsigned long long & "$javacall" - -/* Default handling. Object passed by value. Convert to a pointer */ -%typemap(in) SWIGTYPE ($&1_type argp) -%{ argp = *($&1_ltype*)&$input; - if (!argp) { - SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "Attempt to dereference null $1_type"); - return $null; - } - $1 = *argp; %} - -%typemap(directorout) SWIGTYPE ($&1_type argp) -%{ argp = *($&1_ltype*)&$input; - if (!argp) { - SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "Unexpected null return for type $1_type"); - return $null; - } - $result = *argp; %} - -%typemap(out) SWIGTYPE -#ifdef __cplusplus -%{ *($&1_ltype*)&$result = new $1_ltype((const $1_ltype &)$1); %} -#else -{ - $&1_ltype $1ptr = ($&1_ltype) malloc(sizeof($1_ltype)); - memmove($1ptr, &$1, sizeof($1_type)); - *($&1_ltype*)&$result = $1ptr; -} -#endif - -%typemap(directorin,descriptor="L$packagepath/$&javaclassname;") SWIGTYPE -%{ $input = 0; - *(($&1_ltype*)&$input) = &$1; %} -%typemap(javadirectorin) SWIGTYPE "new $&javaclassname($jniinput, false)" -%typemap(javadirectorout) SWIGTYPE "$&javaclassname.getCPtr($javacall)" - -/* Generic pointers and references */ -%typemap(in) SWIGTYPE * %{ $1 = *($&1_ltype)&$input; %} -%typemap(in, fragment="SWIG_UnPackData") SWIGTYPE (CLASS::*) { - const char *temp = 0; - if ($input) { - temp = JCALL2(GetStringUTFChars, jenv, $input, 0); - if (!temp) return $null; - } - SWIG_UnpackData(temp, (void *)&$1, sizeof($1)); -} -%typemap(in) SWIGTYPE & %{ $1 = *($&1_ltype)&$input; - if (!$1) { - SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "$1_type reference is null"); - return $null; - } %} -%typemap(out) SWIGTYPE * -%{ *($&1_ltype)&$result = $1; %} -%typemap(out, fragment="SWIG_PackData", noblock=1) SWIGTYPE (CLASS::*) { - char buf[128]; - char *data = SWIG_PackData(buf, (void *)&$1, sizeof($1)); - *data = '\0'; - $result = JCALL1(NewStringUTF, jenv, buf); -} -%typemap(out) SWIGTYPE & -%{ *($&1_ltype)&$result = $1; %} - -%typemap(directorout, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) SWIGTYPE * -%{ $result = *($&1_ltype)&$input; %} -%typemap(directorout, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) SWIGTYPE (CLASS::*) -%{ $result = *($&1_ltype)&$input; %} - -%typemap(directorin,descriptor="L$packagepath/$javaclassname;") SWIGTYPE * -%{ 
*(($&1_ltype)&$input) = ($1_ltype) $1; %} -%typemap(directorin,descriptor="L$packagepath/$javaclassname;") SWIGTYPE (CLASS::*) -%{ *(($&1_ltype)&$input) = ($1_ltype) $1; %} - -%typemap(directorout, warning=SWIGWARN_TYPEMAP_DIRECTOROUT_PTR_MSG) SWIGTYPE & -%{ if (!$input) { - SWIG_JavaThrowException(jenv, SWIG_JavaNullPointerException, "Unexpected null return for type $1_type"); - return $null; - } - $result = *($&1_ltype)&$input; %} -%typemap(directorin,descriptor="L$packagepath/$javaclassname;") SWIGTYPE & -%{ *($&1_ltype)&$input = ($1_ltype) &$1; %} - -%typemap(javadirectorin) SWIGTYPE *, SWIGTYPE (CLASS::*) "($jniinput == 0) ? null : new $javaclassname($jniinput, false)" -%typemap(javadirectorin) SWIGTYPE & "new $javaclassname($jniinput, false)" -%typemap(javadirectorout) SWIGTYPE *, SWIGTYPE (CLASS::*), SWIGTYPE & "$javaclassname.getCPtr($javacall)" - -/* Default array handling */ -%typemap(in) SWIGTYPE [] %{ $1 = *($&1_ltype)&$input; %} -%typemap(out) SWIGTYPE [] %{ *($&1_ltype)&$result = $1; %} -%typemap(freearg) SWIGTYPE [ANY], SWIGTYPE [] "" - -/* char arrays - treat as String */ -%typemap(in, noblock=1) char[ANY], char[] { - $1 = 0; - if ($input) { - $1 = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); - if (!$1) return $null; - } -} - -%typemap(directorout, noblock=1) char[ANY], char[] { - $1 = 0; - if ($input) { - $result = ($1_ltype)JCALL2(GetStringUTFChars, jenv, $input, 0); - if (!$result) return $null; - } -} - -%typemap(directorin, descriptor="Ljava/lang/String;", noblock=1) char[ANY], char[] { - $input = 0; - if ($1) { - $input = JCALL1(NewStringUTF, jenv, (const char *)$1); - if (!$input) return $null; - } -} - -%typemap(argout) char[ANY], char[] "" -%typemap(freearg, noblock=1) char[ANY], char[] { if ($1) JCALL2(ReleaseStringUTFChars, jenv, $input, (const char *)$1); } -%typemap(out, noblock=1) char[ANY], char[] { if ($1) $result = JCALL1(NewStringUTF, jenv, (const char *)$1); } -%typemap(javadirectorin) char[ANY], char[] "$jniinput" -%typemap(javadirectorout) char[ANY], char[] "$javacall" - -/* JNI types */ -%typemap(in) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray -%{ $1 = $input; %} - -%typemap(directorout) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray -%{ $result = $input; %} - -%typemap(out) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray -%{ $result = $1; %} - -%typemap(directorin,descriptor="Z") jboolean "$input = $1;" -%typemap(directorin,descriptor="C") jchar "$input = $1;" -%typemap(directorin,descriptor="B") jbyte "$input = $1;" -%typemap(directorin,descriptor="S") jshort "$input = $1;" -%typemap(directorin,descriptor="I") jint "$input = $1;" -%typemap(directorin,descriptor="J") jlong "$input = $1;" -%typemap(directorin,descriptor="F") jfloat "$input = $1;" -%typemap(directorin,descriptor="D") jdouble "$input = $1;" -%typemap(directorin,descriptor="Ljava/lang/String;") jstring "$input = $1;" -%typemap(directorin,descriptor="Ljava/lang/Object;",nouse="1") jobject "$input = $1;" 
-%typemap(directorin,descriptor="[Z") jbooleanArray "$input = $1;" -%typemap(directorin,descriptor="[C") jcharArray "$input = $1;" -%typemap(directorin,descriptor="[B") jbyteArray "$input = $1;" -%typemap(directorin,descriptor="[S") jshortArray "$input = $1;" -%typemap(directorin,descriptor="[I") jintArray "$input = $1;" -%typemap(directorin,descriptor="[J") jlongArray "$input = $1;" -%typemap(directorin,descriptor="[F") jfloatArray "$input = $1;" -%typemap(directorin,descriptor="[D") jdoubleArray "$input = $1;" -%typemap(directorin,descriptor="[Ljava/lang/Object;",nouse="1") jobjectArray "$input = $1;" - -%typemap(javadirectorin) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray - "$jniinput" - -%typemap(javadirectorout) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray - "$javacall" - -/* Typecheck typemaps - The purpose of these is merely to issue a warning for overloaded C++ functions - * that cannot be overloaded in Java as more than one C++ type maps to a single Java type */ - -%typecheck(SWIG_TYPECHECK_BOOL) /* Java boolean */ - jboolean, - bool, - const bool & - "" - -%typecheck(SWIG_TYPECHECK_CHAR) /* Java char */ - jchar, - char, - const char & - "" - -%typecheck(SWIG_TYPECHECK_INT8) /* Java byte */ - jbyte, - signed char, - const signed char & - "" - -%typecheck(SWIG_TYPECHECK_INT16) /* Java short */ - jshort, - unsigned char, - short, - const unsigned char &, - const short & - "" - -%typecheck(SWIG_TYPECHECK_INT32) /* Java int */ - jint, - unsigned short, - int, - long, - const unsigned short &, - const int &, - const long & - "" - -%typecheck(SWIG_TYPECHECK_INT64) /* Java long */ - jlong, - unsigned int, - unsigned long, - long long, - const unsigned int &, - const unsigned long &, - const long long & - "" - -%typecheck(SWIG_TYPECHECK_INT128) /* Java BigInteger */ - unsigned long long, - const unsigned long long & - "" - -%typecheck(SWIG_TYPECHECK_FLOAT) /* Java float */ - jfloat, - float, - const float & - "" - -%typecheck(SWIG_TYPECHECK_DOUBLE) /* Java double */ - jdouble, - double, - const double & - "" - -%typecheck(SWIG_TYPECHECK_STRING) /* Java String */ - jstring, - char *, - char *&, - char[ANY], - char [] - "" - -%typecheck(SWIG_TYPECHECK_BOOL_ARRAY) /* Java boolean[] */ - jbooleanArray - "" - -%typecheck(SWIG_TYPECHECK_CHAR_ARRAY) /* Java char[] */ - jcharArray - "" - -%typecheck(SWIG_TYPECHECK_INT8_ARRAY) /* Java byte[] */ - jbyteArray - "" - -%typecheck(SWIG_TYPECHECK_INT16_ARRAY) /* Java short[] */ - jshortArray - "" - -%typecheck(SWIG_TYPECHECK_INT32_ARRAY) /* Java int[] */ - jintArray - "" - -%typecheck(SWIG_TYPECHECK_INT64_ARRAY) /* Java long[] */ - jlongArray - "" - -%typecheck(SWIG_TYPECHECK_FLOAT_ARRAY) /* Java float[] */ - jfloatArray - "" - -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY) /* Java double[] */ - jdoubleArray - "" - -%typecheck(SWIG_TYPECHECK_OBJECT_ARRAY) /* Java jobject[] */ - jobjectArray - "" - -%typecheck(SWIG_TYPECHECK_POINTER) /* Default */ - SWIGTYPE, - SWIGTYPE *, - SWIGTYPE &, - SWIGTYPE *const&, - SWIGTYPE [], - SWIGTYPE (CLASS::*) - "" - - -/* Exception handling */ - -%typemap(throws) int, - long, - short, - unsigned int, - unsigned long, - unsigned short -%{ char error_msg[256]; - 
sprintf(error_msg, "C++ $1_type exception thrown, value: %d", $1); - SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, error_msg); - return $null; %} - -%typemap(throws) SWIGTYPE, SWIGTYPE &, SWIGTYPE *, SWIGTYPE [], SWIGTYPE [ANY] -%{ (void)$1; - SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "C++ $1_type exception thrown"); - return $null; %} - -%typemap(throws) char * -%{ SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, $1); - return $null; %} - - -/* Typemaps for code generation in proxy classes and Java type wrapper classes */ - -/* The javain typemap is used for converting function parameter types from the type - * used in the proxy, module or type wrapper class to the type used in the JNI class. */ -%typemap(javain) bool, const bool &, - char, const char &, - signed char, const signed char &, - unsigned char, const unsigned char &, - short, const short &, - unsigned short, const unsigned short &, - int, const int &, - unsigned int, const unsigned int &, - long, const long &, - unsigned long, const unsigned long &, - long long, const long long &, - unsigned long long, const unsigned long long &, - float, const float &, - double, const double & - "$javainput" -%typemap(javain) char *, char *&, char[ANY], char[] "$javainput" -%typemap(javain) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray - "$javainput" -%typemap(javain) SWIGTYPE "$&javaclassname.getCPtr($javainput)" -%typemap(javain) SWIGTYPE *, SWIGTYPE &, SWIGTYPE [] "$javaclassname.getCPtr($javainput)" -%typemap(javain) SWIGTYPE (CLASS::*) "$javaclassname.getCMemberPtr($javainput)" - -/* The javaout typemap is used for converting function return types from the return type - * used in the JNI class to the type returned by the proxy, module or type wrapper class. */ -%typemap(javaout) bool, const bool &, - char, const char &, - signed char, const signed char &, - unsigned char, const unsigned char &, - short, const short &, - unsigned short, const unsigned short &, - int, const int &, - unsigned int, const unsigned int &, - long, const long &, - unsigned long, const unsigned long &, - long long, const long long &, - unsigned long long, const unsigned long long &, - float, const float &, - double, const double & { - return $jnicall; - } -%typemap(javaout) char *, char *&, char[ANY], char[] { - return $jnicall; - } -%typemap(javaout) jboolean, - jchar, - jbyte, - jshort, - jint, - jlong, - jfloat, - jdouble, - jstring, - jobject, - jbooleanArray, - jcharArray, - jbyteArray, - jshortArray, - jintArray, - jlongArray, - jfloatArray, - jdoubleArray, - jobjectArray { - return $jnicall; - } -%typemap(javaout) void { - $jnicall; - } -%typemap(javaout) SWIGTYPE { - return new $&javaclassname($jnicall, true); - } -%typemap(javaout) SWIGTYPE & { - return new $javaclassname($jnicall, $owner); - } -%typemap(javaout) SWIGTYPE *, SWIGTYPE [] { - long cPtr = $jnicall; - return (cPtr == 0) ? null : new $javaclassname(cPtr, $owner); - } -%typemap(javaout) SWIGTYPE (CLASS::*) { - String cMemberPtr = $jnicall; - return (cMemberPtr == null) ? 
null : new $javaclassname(cMemberPtr, $owner); - } - -/* Pointer reference typemaps */ -%typemap(jni) SWIGTYPE *const& "jlong" -%typemap(jtype) SWIGTYPE *const& "long" -%typemap(jstype) SWIGTYPE *const& "$*javaclassname" -%typemap(javain) SWIGTYPE *const& "$*javaclassname.getCPtr($javainput)" -%typemap(javaout) SWIGTYPE *const& { - long cPtr = $jnicall; - return (cPtr == 0) ? null : new $*javaclassname(cPtr, $owner); - } -%typemap(in) SWIGTYPE *const& ($*1_ltype temp = 0) -%{ temp = *($1_ltype)&$input; - $1 = ($1_ltype)&temp; %} -%typemap(out) SWIGTYPE *const& -%{ *($1_ltype)&$result = *$1; %} - -/* Typemaps used for the generation of proxy and type wrapper class code */ -%typemap(javabase) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" -%typemap(javaclassmodifiers) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "public class" -%typemap(javacode) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" -%typemap(javaimports) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" -%typemap(javainterfaces) SWIGTYPE, SWIGTYPE *, SWIGTYPE &, SWIGTYPE [], SWIGTYPE (CLASS::*) "" - -/* javabody typemaps */ - -%define SWIG_JAVABODY_METHODS(PTRCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE...) SWIG_JAVABODY_PROXY(PTRCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE) %enddef // legacy name - -%define SWIG_JAVABODY_PROXY(PTRCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE...) -// Base proxy classes -%typemap(javabody) TYPE %{ - private long swigCPtr; - protected boolean swigCMemOwn; - - PTRCTOR_VISIBILITY $javaclassname(long cPtr, boolean cMemoryOwn) { - swigCMemOwn = cMemoryOwn; - swigCPtr = cPtr; - } - - CPTR_VISIBILITY static long getCPtr($javaclassname obj) { - return (obj == null) ? 0 : obj.swigCPtr; - } -%} - -// Derived proxy classes -%typemap(javabody_derived) TYPE %{ - private long swigCPtr; - - PTRCTOR_VISIBILITY $javaclassname(long cPtr, boolean cMemoryOwn) { - super($imclassname.$javaclazznameSWIGUpcast(cPtr), cMemoryOwn); - swigCPtr = cPtr; - } - - CPTR_VISIBILITY static long getCPtr($javaclassname obj) { - return (obj == null) ? 0 : obj.swigCPtr; - } -%} -%enddef - -%define SWIG_JAVABODY_TYPEWRAPPER(PTRCTOR_VISIBILITY, DEFAULTCTOR_VISIBILITY, CPTR_VISIBILITY, TYPE...) -// Typewrapper classes -%typemap(javabody) TYPE *, TYPE &, TYPE [] %{ - private long swigCPtr; - - PTRCTOR_VISIBILITY $javaclassname(long cPtr, boolean futureUse) { - swigCPtr = cPtr; - } - - DEFAULTCTOR_VISIBILITY $javaclassname() { - swigCPtr = 0; - } - - CPTR_VISIBILITY static long getCPtr($javaclassname obj) { - return (obj == null) ? 0 : obj.swigCPtr; - } -%} - -%typemap(javabody) TYPE (CLASS::*) %{ - private String swigCMemberPtr; - - PTRCTOR_VISIBILITY $javaclassname(String cMemberPtr, boolean futureUse) { - swigCMemberPtr = cMemberPtr; - } - - DEFAULTCTOR_VISIBILITY $javaclassname() { - swigCMemberPtr = null; - } - - CPTR_VISIBILITY static String getCMemberPtr($javaclassname obj) { - return obj.swigCMemberPtr; - } -%} -%enddef - -/* Set the default javabody typemaps to use protected visibility. - Use the macros to change to public if using multiple modules. */ -SWIG_JAVABODY_PROXY(protected, protected, SWIGTYPE) -SWIG_JAVABODY_TYPEWRAPPER(protected, protected, protected, SWIGTYPE) - -%typemap(javafinalize) SWIGTYPE %{ - protected void finalize() { - delete(); - } -%} - -/* - * Java constructor typemaps: - * - * The javaconstruct typemap is inserted when a proxy class's constructor is generated. 
- * This typemap allows control over what code is executed in the constructor as - * well as specifying who owns the underlying C/C++ object. Normally, Java has - * ownership and the underlying C/C++ object is deallocated when the Java object - * is finalized (swigCMemOwn is true.) If swigCMemOwn is false, C/C++ is - * ultimately responsible for deallocating the underlying object's memory. - * - * The SWIG_PROXY_CONSTRUCTOR macro defines the javaconstruct typemap for a proxy - * class for a particular TYPENAME. OWNERSHIP is passed as the value of - * swigCMemOwn to the pointer constructor method. WEAKREF determines which kind - * of Java object reference will be used by the C++ director class (WeakGlobalRef - * vs. GlobalRef.) - * - * The SWIG_DIRECTOR_OWNED macro sets the ownership of director-based proxy - * classes and the weak reference flag to false, meaning that the underlying C++ - * object will be reclaimed by C++. - */ - -%define SWIG_PROXY_CONSTRUCTOR(OWNERSHIP, WEAKREF, TYPENAME...) -%typemap(javaconstruct,directorconnect="\n $imclassname.$javaclazznamedirector_connect(this, swigCPtr, swigCMemOwn, WEAKREF);") TYPENAME { - this($imcall, OWNERSHIP);$directorconnect - } -%enddef - -%define SWIG_DIRECTOR_OWNED(TYPENAME...) -SWIG_PROXY_CONSTRUCTOR(true, false, TYPENAME) -%enddef - -// Set the default for SWIGTYPE: Java owns the C/C++ object. -SWIG_PROXY_CONSTRUCTOR(true, true, SWIGTYPE) - -%typemap(javadestruct, methodname="delete", methodmodifiers="public synchronized") SWIGTYPE { - if (swigCPtr != 0) { - if (swigCMemOwn) { - swigCMemOwn = false; - $jnicall; - } - swigCPtr = 0; - } - } - -%typemap(javadestruct_derived, methodname="delete", methodmodifiers="public synchronized") SWIGTYPE { - if (swigCPtr != 0) { - if (swigCMemOwn) { - swigCMemOwn = false; - $jnicall; - } - swigCPtr = 0; - } - super.delete(); - } - -%typemap(directordisconnect, methodname="swigDirectorDisconnect") SWIGTYPE %{ - protected void $methodname() { - swigCMemOwn = false; - $jnicall; - } -%} - -%typemap(directorowner_release, methodname="swigReleaseOwnership") SWIGTYPE %{ - public void $methodname() { - swigCMemOwn = false; - $jnicall; - } -%} - -%typemap(directorowner_take, methodname="swigTakeOwnership") SWIGTYPE %{ - public void $methodname() { - swigCMemOwn = true; - $jnicall; - } -%} - -/* Java specific directives */ -#define %javaconst(flag) %feature("java:const","flag") -#define %javaconstvalue(value) %feature("java:constvalue",value) -#define %javaenum(wrapapproach) %feature("java:enum","wrapapproach") -#define %javamethodmodifiers %feature("java:methodmodifiers") -#define %javaexception(exceptionclasses) %feature("except",throws=exceptionclasses) -#define %nojavaexception %feature("except","0",throws="") -#define %clearjavaexception %feature("except","",throws="") - -%pragma(java) jniclassclassmodifiers="public class" -%pragma(java) moduleclassmodifiers="public class" - -/* Some ANSI C typemaps */ - -%apply unsigned long { size_t }; -%apply const unsigned long & { const size_t & }; - -/* Array reference typemaps */ -%apply SWIGTYPE & { SWIGTYPE ((&)[ANY]) } - -/* const pointers */ -%apply SWIGTYPE * { SWIGTYPE *const } - -/* String & length */ -%typemap(jni) (char *STRING, size_t LENGTH) "jbyteArray" -%typemap(jtype) (char *STRING, size_t LENGTH) "byte[]" -%typemap(jstype) (char *STRING, size_t LENGTH) "byte[]" -%typemap(javain) (char *STRING, size_t LENGTH) "$javainput" -%typemap(freearg) (char *STRING, size_t LENGTH) "" -%typemap(in) (char *STRING, size_t LENGTH) { - if ($input) { - $1 = (char *) 
JCALL2(GetByteArrayElements, jenv, $input, 0); - $2 = (size_t) JCALL1(GetArrayLength, jenv, $input); - } else { - $1 = 0; - $2 = 0; - } -} -%typemap(argout) (char *STRING, size_t LENGTH) { - if ($input) JCALL3(ReleaseByteArrayElements, jenv, $input, (jbyte *)$1, 0); -} -%typemap(directorin, descriptor="[B") (char *STRING, size_t LENGTH) { - jbyteArray jb = (jenv)->NewByteArray($2); - (jenv)->SetByteArrayRegion(jb, 0, $2, (jbyte *)$1); - $input = jb; -} -%typemap(directorargout) (char *STRING, size_t LENGTH) -%{(jenv)->GetByteArrayRegion($input, 0, $2, (jbyte *)$1); %} -%apply (char *STRING, size_t LENGTH) { (char *STRING, int LENGTH) } - -/* java keywords */ -%include - -// Default enum handling -%include - diff --git a/tests/examplefiles/swig_std_vector.i b/tests/examplefiles/swig_std_vector.i deleted file mode 100644 index baecf850..00000000 --- a/tests/examplefiles/swig_std_vector.i +++ /dev/null @@ -1,225 +0,0 @@ -// -// std::vector -// - -%include - -// Vector - -%define %std_vector_methods(vector...) - %std_sequence_methods(vector) - - void reserve(size_type n); - size_type capacity() const; -%enddef - - -%define %std_vector_methods_val(vector...) - %std_sequence_methods_val(vector) - - void reserve(size_type n); - size_type capacity() const; -%enddef - - -// ------------------------------------------------------------------------ -// std::vector -// -// The aim of all that follows would be to integrate std::vector with -// as much as possible, namely, to allow the user to pass and -// be returned tuples or lists. -// const declarations are used to guess the intent of the function being -// exported; therefore, the following rationale is applied: -// -// -- f(std::vector), f(const std::vector&): -// the parameter being read-only, either a sequence or a -// previously wrapped std::vector can be passed. -// -- f(std::vector&), f(std::vector*): -// the parameter may be modified; therefore, only a wrapped std::vector -// can be passed. -// -- std::vector f(), const std::vector& f(): -// the vector is returned by copy; therefore, a sequence of T:s -// is returned which is most easily used in other functions -// -- std::vector& f(), std::vector* f(): -// the vector is returned by reference; therefore, a wrapped std::vector -// is returned -// -- const std::vector* f(), f(const std::vector*): -// for consistency, they expect and return a plain vector pointer. 
-// ------------------------------------------------------------------------ - -%{ -#include -%} - -// exported classes - - -namespace std { - - template > - class vector { - public: - typedef size_t size_type; - typedef ptrdiff_t difference_type; - typedef _Tp value_type; - typedef value_type* pointer; - typedef const value_type* const_pointer; - typedef _Tp& reference; - typedef const _Tp& const_reference; - typedef _Alloc allocator_type; - - %traits_swigtype(_Tp); - %traits_enum(_Tp); - - %fragment(SWIG_Traits_frag(std::vector<_Tp, _Alloc >), "header", - fragment=SWIG_Traits_frag(_Tp), - fragment="StdVectorTraits") { - namespace swig { - template <> struct traits > { - typedef pointer_category category; - static const char* type_name() { - return "std::vector<" #_Tp "," #_Alloc " >"; - } - }; - } - } - - %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector<_Tp, _Alloc >); - -#ifdef %swig_vector_methods - // Add swig/language extra methods - %swig_vector_methods(std::vector<_Tp, _Alloc >); -#endif - - %std_vector_methods(vector); - }; - - // *** - // This specialization should disappear or get simplified when - // a 'const SWIGTYPE*&' can be defined - // *** - template - class vector<_Tp*, _Alloc > { - public: - typedef size_t size_type; - typedef ptrdiff_t difference_type; - typedef _Tp* value_type; - typedef value_type* pointer; - typedef const value_type* const_pointer; - typedef value_type reference; - typedef value_type const_reference; - typedef _Alloc allocator_type; - - %traits_swigtype(_Tp); - - %fragment(SWIG_Traits_frag(std::vector<_Tp*, _Alloc >), "header", - fragment=SWIG_Traits_frag(_Tp), - fragment="StdVectorTraits") { - namespace swig { - template <> struct traits > { - typedef value_category category; - static const char* type_name() { - return "std::vector<" #_Tp " *," #_Alloc " >"; - } - }; - } - } - - %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector<_Tp*, _Alloc >); - -#ifdef %swig_vector_methods_val - // Add swig/language extra methods - %swig_vector_methods_val(std::vector<_Tp*, _Alloc >); -#endif - - %std_vector_methods_val(vector); - }; - - // *** - // const pointer specialization - // *** - template - class vector<_Tp const *, _Alloc > { - public: - typedef size_t size_type; - typedef ptrdiff_t difference_type; - typedef _Tp const * value_type; - typedef value_type* pointer; - typedef const value_type* const_pointer; - typedef value_type reference; - typedef value_type const_reference; - typedef _Alloc allocator_type; - - %traits_swigtype(_Tp); - - %fragment(SWIG_Traits_frag(std::vector<_Tp const*, _Alloc >), "header", - fragment=SWIG_Traits_frag(_Tp), - fragment="StdVectorTraits") { - namespace swig { - template <> struct traits > { - typedef value_category category; - static const char* type_name() { - return "std::vector<" #_Tp " const*," #_Alloc " >"; - } - }; - } - } - - %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector<_Tp const*, _Alloc >); - -#ifdef %swig_vector_methods_val - // Add swig/language extra methods - %swig_vector_methods_val(std::vector<_Tp const*, _Alloc >); -#endif - - %std_vector_methods_val(vector); - }; - - // *** - // bool specialization - // *** - - template - class vector { - public: - typedef size_t size_type; - typedef ptrdiff_t difference_type; - typedef bool value_type; - typedef value_type* pointer; - typedef const value_type* const_pointer; - typedef value_type reference; - typedef value_type const_reference; - typedef _Alloc allocator_type; - - %traits_swigtype(bool); - - %fragment(SWIG_Traits_frag(std::vector), 
"header", - fragment=SWIG_Traits_frag(bool), - fragment="StdVectorTraits") { - namespace swig { - template <> struct traits > { - typedef value_category category; - static const char* type_name() { - return "std::vector"; - } - }; - } - } - - %typemap_traits_ptr(SWIG_TYPECHECK_VECTOR, std::vector); - - -#ifdef %swig_vector_methods_val - // Add swig/language extra methods - %swig_vector_methods_val(std::vector); -#endif - - %std_vector_methods_val(vector); - -#if defined(SWIG_STD_MODERN_STL) && !defined(SWIG_STD_NOMODERN_STL) - void flip(); -#endif - - }; - -} diff --git a/tests/examplefiles/tads3_example.t b/tests/examplefiles/tads3_example.t deleted file mode 100644 index 41881c93..00000000 --- a/tests/examplefiles/tads3_example.t +++ /dev/null @@ -1,1248 +0,0 @@ -#charset "utf-8" - -#include -#include - -extern function extern_function; -extern method extern_method; -extern function extern_function(a, b=a, c='<>', d:, e:=1, f?, ...); -extern method extern_method(a, b=a, c='<>', d:, e:=1, f?, [g]);; -extern class extern_class; -extern object extern_object; -intrinsic 't3vm' { }; -#ifndef PropDefAny -intrinsic class Object 'root-object/030004' { }; -#endif -object /**//**/ // /* \\ -#define Room Unthing - template [lst]; - -/* - * Quotations from "Le Roman de la Rose" are transcribed from MS. Douce 195, - * owned by Bodleian Library, University of Oxford - * (http://image.ox.ac.uk/show?collection=bodleian&manuscript=msdouce195). - */ - -versionInfo: GameID - IFID = '17d8efc3-07da-4dde-a837-ff7c4e386a77' - name = 'Pygmentalion' - byline = 'by David Corbett' - htmlByline = 'by David - Corbett' - version = '1' - authorEmail = 'David Corbett\040' - desc = 'You have fallen in love with a statue\x2e' - htmlDesc = 'You have fallen in love with a statue\x2E' -; - -/* - * Pymalion fu ẽtailleꝛꝛes. - * Poᷣtrayãs en fus ⁊ en peꝛꝛeˢ - * En metaulx en os ⁊ en cyꝛes - * Et en touteˢ aultres matires. - * Quon peult a tel oeuure trouuer. - * Poᷣ ſon grant engin eſpꝛouuer. - * Car maiſtre en fu bien dire loz. - * Ainſi com poᷣ acquerre loz - * Se voult a poᷣtraire deduyꝛe - * Si fiſt vng ymage diuuyꝛe - * Et miſt au faire tel entente - * Quel fu ſi plaiſãt et ſi gente - * Quel ſembloit eſtre auſſi viue. - * Com la plus belle riens q̇ viue - * (MS. Douce 195, fol. 149r) - */ - -modify _init() -{ - ({: local r, r = randomize, r})(); - replaced(); -} - -gameMain: GameMainDef - initialPlayerChar: Actor { - desc = "You look the same as usual, but you feel unusually - sentimental. " - location = entrance - } - showIntro - { - "The statue is undeniably a masterpiece: the most skillful carving you - have ever done, and the most beautiful woman you have ever seen. 
- Unfortunately, she is also an inanimate block, and now you can neither - work nor rest for unrequitable love.\b - Once again you stumble into your studio, hoping and praying to find - your statue brought to life.\b - <>\r\n - <>\b"; - } -; - -enum token token, tokOp, token; - -modify cmdTokenizer - rules_ = static - [ - ['whitespace', new RexPattern('%s+'), nil, &tokCvtSkip, nil], - ['punctuation', new RexPattern('[.,;:?!]'), tokPunct, nil, nil], - ['spelled number', - new RexPattern('(twenty|thirty|forty|fifty|sixty|' - + 'seventy|eighty|ninety)-' - + '(one|two|three|four|five|six|seven|eight|nine)' - + '(?!)'), - tokWord, &tokCvtSpelledNumber, nil], - ['spelled operator', new RexPattern( - '(plus|positive|minus|negat(iv)?e|not|inverse(%s+of)?|' - + 'times|over|divided%s+by|mod(ulo)?|and|xor|or|[al]?sh[lr])' - + '(?!)'), - tokOp, &tokCvtSpelledOperator, nil], - ['operator', R'[-!~+*/%&^|]|<<|>>>?', tokOp, nil, nil], - ['word', new RexPattern('*'), - tokWord, nil, nil], - ['string ascii-quote', R"""([`\'"])(.*)%1(?!)""", - tokString, nil, nil], - ['string back-quote', R"`(.*)'(?!%w)", tokString, nil, nil], - ['string curly single-quote', new RexPattern('\u2018(.*)\u2019'), - tokString, nil, nil], - ['string curly double-quote', new RexPattern('\u201C(.*)\u201D'), - tokString, nil, nil], - ['string unterminated', R'''([`\'"\u2018\u201C](.*)''', tokString, - nil, nil], - ['integer', new RexPattern('[0-9]+'), tokInt, nil, nil] - ] - replace tokCvtSpelledOperator(txt, typ, toks) - { - toks.append([rexReplace(R'%s+', txt.toLower(), '\\'), typ, txt]); - } -; - -/* Tokens */ - -/* - * Puiˢ li reueſt en maĩteˢ guiſes. - * Robeˢ faicteˢ ꝑ grãˢ maiſtriſeˢ. - * De biaulx dꝛaps de ſoye ⁊ de laĩe. - * Deſcarlate de tiretaine - * De vert de pers ⁊ de bꝛunecte - * De couleᷣ freſche fine ⁊ necte - * Ou moult a riches paneˢ miſes. - * Herminees vaires et griſes - * Puis les li roſte puis reſſaye. - * Cõmant li ſiet robbe de ſaye - * Sendaulx meloguins galebꝛunˢ. - * Indes vermeilz iaunes ⁊ bꝛunˢ. - * [...] - * Aultre foiz luy repꝛẽd courage. - * De tout oſter ⁊ mectre guindeˢ. - * Iaunes vermeilles vers ⁊ indeˢ. - * (MS. Douce 195, fol. 
150r) - */ - -class Token: Achievement -{ - points = 1; - desc = "<><><>"; - before = before = '', before_ - after = (after = '', after_) -} - -Token template inherited 'before_' 'after_' 'desc_'; - -#define DefineToken(name, before, after) name##Token: Token before after #@name - -DefineToken(builtin, '', ''); -DefineToken(comment, '', ''); -DefineToken(decorator, '', ''); -DefineToken(error, '', ''); -DefineToken(escape, '', ''); -DefineToken(float, '', ''); -DefineToken(keyword, '', ''); -DefineToken(label, '', ''); -DefineToken(long, '', ''); -DefineToken(name, '', ''); -DefineToken(operator, '', ''); -DefineToken(string, '', ''); -DefineToken(whitespace, '', ''); - -function highlightToken(tokenString) -{ - local token = [ - 'built in' -> builtinToken, - 'comment' -> commentToken, - 'decorator' -> decoratorToken, - 'error' -> errorToken, - 'escape' -> escapeToken, - 'float' -> floatToken, - 'keyword' -> keywordToken, - 'label' -> labelToken, - 'long' -> longToken, - 'name' -> nameToken, - 'operator' -> operatorToken, - 'string' -> stringToken, - 'white space' -> whitespaceToken, - * -> nil - ][tokenString.toLower()]; - if (!token) - return tokenString; - token.awardPointsOnce(); - return '<><><>'; -} - -string /**//**/ // /* \\ -#define Room Unthing - template <> highlightToken; - -/* Grammar for materials */ - -dictionary property material; -grammar adjWord(material): ->adj_ : AdjPhraseWithVocab - getVocabMatchList(resolver, results, extraFlags) - { - return getWordMatches(adj_, &material, resolver, extraFlags, - VocabTruncated); - } - getAdjustedTokens() - { - return [adj_, &material]; - } -; - -/* Rooms and objects */ - -+ property location; - -entrance: Room 'Entrance' - "You are in the entrance to your studio. This is where you carve great - works of art, not that you have felt like making any lately. A door leads - outside, and the studio itself is to the north and the east. " - north = workbenchRoom - northeast = sinkRoom - east = altarRoom - south = door - out asExit(south) -; - -+ door: LockableWithKey, Door 'door' 'door' - "It is a simple wooden door. " - material = 'wood' 'wooden' - keyList = [key] - cannotOpenLockedMsg = '{The dobj/He} {is} locked. You cannot - <>! ' -; - -key: PresentLater, Key 'key' 'key' @altar - "It is a <>grimy<> bronze key. <>On it is \ - etched the word <>. " - material = 'bronze' - clean = nil - keyword = (keyword = randomGreekWord(), targetprop) - dobjFor(Clean) { verify { } action { askForIobj(CleanWith); } } - dobjFor(CleanWith) - { - verify - { - if (clean) - illogicalAlready('{The dobj/He} {is} already clean. '); - } - action - { - gDobj.clean = true; - "{You/He} clean{s} {the dobj/him}, revealing an inscription. "; - } - } - dobjFor(Read) { verify { nonObvious; } } -; - -workbenchRoom: Room 'At the Workbench' - "This workbench, in the northwest part of the studio, was where you would - create works of art. Now you just come here to contemplate your - creation’s beauty and lament your hopeless situation.\b - The statue stands on a plinth beside the workbench. " - east = sinkRoom - southeast = altarRoom - south = entrance - getDestName(actor, origin) { return 'the workbench'; } -; - -+ workbench: Fixture, Surface - 'workbench/bench/material/materials/tool/tools' 'workbench' - "Normally, the workbench would be scattered with half-finished projects, - but now your tools and materials lie abandoned. " -; - -+ plinth: Fixture, Thing 'marble plinth/pedestal' 'plinth' - "It’s a smoothed block of marble about a cubit high. 
" -; - -replace grammar predicate(Screw): ' ': object; -replace grammar predicate(ScrewWith): ' ': object; -+ + statue: Fixture, Surface - '"creation\'s" beauty/carving/creation/galatea/statue/woman' 'statue' - "This is a<>n untitled<> statue of a woman - carved from <>flawless <> - <>milk-white <>ivory. - <>Her - <>long <>hair is done up in a - chignon<>, with a few strands falling down her - neck<><>, and \v<>.<><> - <>She radiates an aura of contrapposto grace. - <><>\bYou wonder what she - <>is going to<>will<> be like as a - woman. - <>Maybe she’ll be a painter and expand - your business.<> - <>Maybe she’ll have a head for figures - and will put the accounts in order.<> - <>She’ll love you, obviously, but beyond - that you don’t know.<><> - <>If only Aphrodite would bring her to life - without this silly puzzle about tokens and mirrors!<> " - material = 'ivory' - propertyset 'is*' - { - propertyset 'H*' - { - im = nil\ - er = true; - } - It = true - } - iobjFor(PutOn) - { - check - { - if (gDobj not /**//**/ // /* \\ -#define Room Unthing - in (necklace, __objref(necklace, warn))) - "How rude! You don’t know what you were thinking. "; - } - } - iobjFor(GiveTo) remapTo(PutOn, DirectObject, IndirectObject) -; - -+++ necklace: Wearable - 'pearl necklace/string pearls' '<> of pearls' - "This is a masterfully crafted pearl necklace. You hope the statue - won’t mind if you hold onto it for a while. " - initDesc = "You gave the statue this pearl necklace yesterday. " - isPlural = true -; - -altarRoom: Room 'At the Altar' - "Light from the window illuminates a crude altar. Until recently, this - corner was your bedroom. The rest of the studio lies north and west. " - north = sinkRoom - northwest = workbenchRoom - west = entrance - getDestName(actor, origin) { return 'the altar'; } -; - -+ window: Fixture 'window' 'window' - "It’s just a window above the altar. <>The space under the - window is blank; as an interior <>, you can’t - help but think the wall would benefit from a bas-relief, but – - sigh &endash you are too lovelorn to wield the chisel. <<||>>The - wall right below it is a boring <>. <>" -; - -+ altar: Fixture, Surface 'crude rough altar/banker/slab' 'altar' - "A rough marble slab lies on a wooden banker. In your rush to construct an - altar, you neglected the usual surface finish and friezes, but you pray at - it anyway. You are sure the gods will understand. " - material = 'marble' 'wood' 'wooden' - bulkCapacity = 1 - dobjFor(PrayAt) - { - verify { } - action() - { - /* - * Biaulx dieux diſt il tout ce poez. - * Sil voꝰ plaiſt ma requeſte oez - * [...] - * Et la belle q̇ mon cueᷣ emble - * Qui ſi bien yuoyꝛe reſſemble. - * Deuiengne ma loyal amye - * De fẽme ait coꝛps ame et vie - * (MS. Douce 195, fol. 151r) - */ - local offering; - foreach (offering in contents); - if (!keywordToken.scoreCount) - "<>O Aphrodite, you say, comforter of - hopeless lovers, hear my prayer! May she to whom I have given - my heart be given body, soul, and life. And a colorful - personality. And&mdash\b - You are interrupted by a shimmering about the altar. As you - watch, it takes the form of a callipygian goddess.\b - Mortal, I have heard your heart-felt and oft-repeated plea, - and I will take pity on you, says Aphrodite. If you give - me a token of your love as an offering, I will give you the - <> of life. Speak this word in the - presence of a mirror, and I will grant your request.\b - She fades away, adding, As for her colorful personality, - just look around you. 
<><>"; - else if (key.location) - "O Aphrodite, you say, what am I supposed to do - again?\bThe goddess reappears and reminds you to speak the - keyword of life at a mirror. <>What’s the - keyword, then? Gods help those who help themselves. - Figure it out yourself.<>Why a mirror? I like - mirrors.<> "; - else if (offering == necklace) - { - "Aphrodite reappears. A necklace! Perfect! The necklace - disappears in a bright flash. When your eyes readjust, you see - a key lying in its place. "; - necklace.moveInto(nil); - key.makePresent(); - } - else if (+offering) - "Aphrodite reappears. She eyes <> - skeptically. <>No <>.<>You - call that a token of love?<>\^<>? - Really?<>Come on, mortal, it’s not that - difficult!<> "; - else - "I heard you the first time, says Aphrodite. Prove - your devotion by offering a token of your love at the altar, - or the deal’s off. "; - } - } - iobjFor(GiveTo) remapTo(PutOn, DirectObject, IndirectObject) -; - -aphrodite: Unthing - '(love) aphrodite/cytherea/god/goddess/venus love' 'Aphrodite' - '<>You can only pray to a god. - <>You need an altar to interact with a god. ' - location = (gPlayerChar) - isProperName = true - isHer = true - iobjFor(GiveTo) - { - verify - { - illogical('She isn’t here. You’ll have to leave {the - dobj/him} somewhere she can find it. '); - } - } - dobjFor(PrayAt) maybeRemapTo(gActor.canSee(altar), PrayAt, altar) -; - -sinkRoom: Room 'Washroom' - "Sculpting marble is a dusty business. You use this sink to clean off after - a hard day’s work. Beside the sink is a small end table, and on the - wall is a calculator. The rest of the studio is south and west. " - south = altarRoom - southwest = entrance - west = workbenchRoom -; - -property level, overflowing; -export overflowing; -export level 'waterLevel'; -+ sink: Fixture '(auto) (mop) auto-sink/autosink/bowl/drain/faucet/sink' 'sink' - "This is a state-of-the-art mop sink with anti-miasmic coating and bronze - backsplash. It is so modern, there are no handles or other obvious ways to - turn it on.\b - <>It is overflowing. - <>It is full to the brim with water. - <= 15000>>It is full of water. - <>It is half full of water. - <= 2000>>There is some water in the sink. - < 0>>A small puddle has formed at the bottom of the sink. - <>It is empty. - <>It looks like it hasn’t been used in a - <> time. " - level = not in ([lst]) { return argcount; } - not = in() - overflowing = nil - current = self - setLevel(level:) - { - targetobj.current.overflowing = level == nil; - targetobj.current.level = min(level ?? 0, 20000); - if (sink.overflowing || sink.level > 0e+1) - sinkWater.makePresent(); - if (basin.overflowing || basin.level > 0e-1) - basinWater.makePresent(); - } - iobjFor(CleanWith) remapTo(CleanWith, DirectObject, sinkWater) -; - -++ sinkWater: PresentLater, Fixture - '(sink) water sink water' 'water' "<>" - disambigName = 'water in the sink' - dobjFor(Drink) - { - verify { illogical('''{You're} not thirsty. '''); } - } - iobjFor(CleanWith) - { - preCond = [] - verify { - if (!location) - illogicalNow('There is no water in the sink. '); - if (!sink.overflowing && sink.level < 1e2) - illogicalNow('There is not enough water in the sink. '); - } - } -; - -+ table: Fixture, Surface 'small end bracket/table' 'table' - "<>Upon closer inspection, you see that \v<>The table is - bracketed to the wall. " -; - -++ Readable '"operator\'s" manual' 'manual' - "
    <>’s Manual<\center>\b - To control the auto-sink, use the calculator add-on to enter the - desired volume of water. For example,\n - \t\t<>\n - to fill the basin with <<% ,d 0x69 * 0105>> kochliaria{ x = "hello" } - -x = 'a' -x # : Int32 | String | Char - - -abstract class Animal - # Makes this animal talk - abstract def talk -end - -class Dog < Animal - def talk - "Woof!" - end -end - -class Cat < Animal - def talk - "Miau" - end -end - -class Person - getter pet - - def initialize(@name, @pet) - end -end - -john = Person.new "John", Dog.new -peter = Person.new "Peter", Cat.new - - -john.pet.talk #=> "Woof!" - - -a = 1 > 2 ? 3 : 4 - -# The above is the same as: -a = if 1 > 2 - 3 - else - 4 - end - - -def some_method : String - "hello" -end - - -PI = 3.14 - -module Earth - RADIUS = 6_371_000 -end - -PI #=> 3.14 -Earth::RADIUS #=> 6_371_000 - - -TEN = begin - a = 0 - while a < 10 - a += 1 - end - a -end - -TEN #=> 10 - - -class Person - getter name - - def initialize(@name) - @age = 0 - end -end - -john = Person.new "John" -john.name #=> "John" -john.name.size #=> 4 - - -one = Person.new 1 -one.name #=> 1 -one.name + 2 #=> 3 - - -john = Person.new "John" -one = Person.new 1 - - -john = Person.new "John" -one = Person.new 1 - -# Error: undefined method 'size' for Int32 -john.name.size - -# Error: no overload matches 'String#+' with types Int32 -john.name + 3 - - -john = Person.new "John" -john.name.size -one = Person.new 1 - - -class Person - getter name - - def initialize(@name) - @age = 0 - end - - def address - @address - end - - def address=(@address) - end -end - -john = Person.new "John" -john.address = "Argentina" - - -# Error: undefined method 'size' for Nil -john.address.size - - -class Person - @age = 0 - - def initialize(@name) - end -end - - -class Person - @age : Int32 - - def initialize(@name) - @age = 0 - end -end - - -a = if 2 > 1 - 3 - else - 4 - end -a #=> 3 - - -if 1 > 2 -else - 3 -end - - -def twice(&block) - yield - yield -end - - -twice() do - puts "Hello!" -end - -twice do - puts "Hello!" -end - -twice { puts "Hello!" 
} - - -def twice - yield 1 - yield 2 -end - -twice do |i| - puts "Got #{i}" -end - - -twice { |i| puts "Got #{i}" } - - -def many - yield 1, 2, 3 -end - -many do |x, y, z| - puts x + y + z -end - -# Output: 6 - - -def many - yield 1, 2, 3 -end - -many do |x, y| - puts x + y -end - -# Output: 3 - - -def twice - yield - yield -end - -twice do |i| - puts i.inspect -end - - -def some - yield 1, 'a' - yield true, "hello" - yield 2 -end - -some do |first, second| - # first is Int32 | Bool - # second is Char | String | Nil -end - - -method do |argument| - argument.some_method -end - - -method(&.some_method) - - -method &.some_method(arg1, arg2) - - -method &.+(2) -method &.[index] - - -def twice - v1 = yield 1 - puts v1 - - v2 = yield 2 - puts v2 -end - -twice do |i| - i + 1 -end - - -ary = [1, 2, 3] -ary.map { |x| x + 1 } #=> [2, 3, 4] -ary.select { |x| x % 2 == 1 } #=> [1, 3] - - -def transform(value) - yield value -end - -transform(1) { |x| x + 1 } #=> 2 - - -def thrice - puts "Before 1" - yield 1 - puts "Before 2" - yield 2 - puts "Before 3" - yield 3 - puts "After 3" -end - -thrice do |i| - if i == 2 - break - end -end - - -def twice - yield 1 - yield 2 -end - -twice { |i| i + 1 } #=> 3 -twice { |i| break "hello" } #=> "hello" - - -value = twice do |i| - if i == 1 - break "hello" - end - i + 1 -end -value #:: Int32 | String - - -values = twice { break 1, 2 } -values #=> {1, 2} - - -value = twice { break } -value #=> nil - - -def twice - yield 1 - yield 2 -end - -twice do |i| - if i == 1 - puts "Skipping 1" - next - end - - puts "Got #{i}" -end - - - -def twice - v1 = yield 1 - puts v1 - - v2 = yield 2 - puts v2 -end - -twice do |i| - if i == 1 - next 10 - end - - i + 1 -end - -# Output -# 10 -# 3 - - -class Foo - def one - 1 - end - - def yield_with_self - with self yield - end - - def yield_normally - yield - end -end - -def one - "one" -end - -Foo.new.yield_with_self { one } # => 1 -Foo.new.yield_normally { one } # => "one" - - -def twice - yield 1 - yield 2 -end - -twice do |i| - puts "Got: #{i}" -end - - -i = 1 -puts "Got: #{i}" -i = 2 -puts "Got: #{i}" - - -3.times do |i| - puts i -end - - -struct Int - def times - i = 0 - while i < self - yield i - i += 1 - end - end -end - - -i = 0 -while i < 3 - puts i - i += 1 -end - - -class Person - def initialize(@name) - end - - def greet - puts "Hi, I'm #{@name}" - end -end - -class Employee < Person -end - -employee = Employee.new "John" -employee.greet # "Hi, I'm John" - - -class Person - def initialize(@name) - end -end - -class Employee < Person - def initialize(@name, @company_name) - end -end - -Employee.new "John", "Acme" # OK -Employee.new "Peter" # Error: wrong number of arguments - # for 'Employee:Class#new' (1 for 2) - - -class Person - def greet(msg) - puts "Hi, #{msg}" - end -end - -class Employee < Person - def greet(msg) - puts "Hello, #{msg}" - end -end - -p = Person.new -p.greet "everyone" # "Hi, everyone" - -e = Employee.new -e.greet "everyone" # "Hello, everyone" - - -class Person - def greet(msg) - puts "Hi, #{msg}" - end -end - -class Employee < Person - def greet(msg : Int32) - puts "Hi, this is a number: #{msg}" - end -end - -e = Employee.new -e.greet "everyone" # "Hi, everyone" - -e.greet 1 # "Hi, this is a number: 1" - - -class Person - def greet(msg) - puts "Hello, "#{msg}" - end -end - -class Employee < Person - def greet(msg) - super # Same as: super(msg) - super("another message") - end -end - - -def int_to_int(&block : Int32 -> Int32) - block -end - -proc = int_to_int { |x| x + 1 } -proc.call(1) #=> 2 - - -class 
Model - def on_save(&block) - @on_save_callback = block - end - - def save - if callback = @on_save_callback - callback.call - end - end -end - -model = Model.new -model.on_save { puts "Saved!" } -model.save # prints "Saved!" - - -def some_proc(&block : Int32 ->) - block -end - -proc = some_proc { |x| x + 1 } -proc.call(1) # void - - -def some_proc(&block : Int32 -> _) - block -end - -proc = some_proc { |x| x + 1 } -proc.call(1) # 2 - -proc = some_proc { |x| x.to_s } -proc.call(1) # "1" - - -macro update_x - x = 1 -end - -x = 0 -update_x -x #=> 1 - - -macro dont_update_x - %x = 1 - puts %x -end - -x = 0 -dont_update_x # outputs 1 -x #=> 0 - - -macro fresh_vars_sample(*names) - # First declare vars - {% for name, index in names %} - print "Declaring: ", "%name{index}", '\n' - %name{index} = {{index}} - {% end %} - - # Then print them - {% for name, index in names %} - print "%name{index}: ", %name{index}, '\n' - {% end %} -end - -fresh_vars_sample a, b, c - -# Sample output: -# Declaring: __temp_255 -# Declaring: __temp_256 -# Declaring: __temp_257 -# __temp_255: 0 -# __temp_256: 1 -# __temp_257: 2 - - -class Object - macro def instance_vars_names : Array(String) - {{ @type.instance_vars.map &.name.stringify }} - end -end - -class Person - def initialize(@name, @age) - end -end - -person = Person.new "John", 30 -person.instance_vars_names #=> ["name", "age"] - - -class Object - macro def has_instance_var?(name) : Bool - # We cannot access name inside the macro expansion here, - # instead we need to use the macro language to construct an array - # and do the inclusion check at runtime. - {{ @type.instance_vars.map &.name.stringify }}.includes? name - end -end - -person = Person.new "John", 30 -person.has_instance_var?("name") #=> true -person.has_instance_var?("birthday") #=> false - - -class Parent - macro inherited - def {{@type.name.downcase.id}} - 1 - end - end -end - -class Child < Parent -end - -Child.new.child #=> 1 - - -macro method_missing(name, args, block) - print "Got ", {{name.id.stringify}}, " with ", {{args.size}}, " arguments", '\n' -end - -foo # Prints: Got foo with 0 arguments -bar 'a', 'b' # Prints: Got bar with 2 arguments - - -sizeof(Int32) #=> 4 -sizeof(Int64) #=> 8 - - -# On a 64 bits machine -sizeof(Pointer(Int32)) #=> 8 -sizeof(String) #=> 8 - - -a = 1 -sizeof(typeof(a)) #=> 4 - - -class Foo - macro emphasize(value) - "***#{ {{value}} }***" - end - - def yield_with_self - with self yield - end -end - -Foo.new.yield_with_self { emphasize(10) } #=> "***10***" - - -# This generates: -# -# def :foo -# 1 -# end -define_method :foo, 1 - - -macro define_method(name, content) - def {{name.id}} - {{content}} - end -end - -# This correctly generates: -# -# def foo -# 1 -# end -define_method :foo, 1 - - -macro define_method(name, content) - def {{name}} - {% if content == 1 %} - "one" - {% else %} - {{content}} - {% end %} - end -end - -define_method foo, 1 -define_method bar, 2 - -foo #=> one -bar #=> 2 - - -{% if env("TEST") %} - puts "We are in test mode" -{% end %} - - -macro define_dummy_methods(names) - {% for name, index in names %} - def {{name.id}} - {{index}} - end - {% end %} -end - -define_dummy_methods [foo, bar, baz] - -foo #=> 0 -bar #=> 1 -baz #=> 2 - - -macro define_dummy_methods(hash) - {% for key, value in hash %} - def {{key.id}} - {{value}} - end - {% end %} -end -define_dummy_methods({foo: 10, bar: 20}) -foo #=> 10 -bar #=> 20 - - -{% for name, index in ["foo", "bar", "baz"] %} - def {{name.id}} - {{index}} - end -{% end %} - -foo #=> 0 -bar #=> 1 -baz #=> 
2 - - -macro define_dummy_methods(*names) - {% for name, index in names %} - def {{name.id}} - {{index}} - end - {% end %} -end - -define_dummy_methods foo, bar, baz - -foo #=> 0 -bar #=> 1 -baz #=> 2 - - -macro println(*values) - print {{*values}}, '\n' -end - -println 1, 2, 3 # outputs 123\n - - -VALUES = [1, 2, 3] - -{% for value in VALUES %} - puts {{value}} -{% end %} - - -until some_condition - do_this -end - -# The above is the same as: -while !some_condition - do_this -end - - -a = some_condition ? nil : 3 -# a is Int32 or Nil - -if a - # Since the only way to get here is if a is truthy, - # a can't be nil. So here a is Int32. - a.abs -end - - -if a = some_expression - # here a is not nil -end - - -if a && b - # here both a and b are guaranteed not to be Nil -end - - -if @a - # here @a can be nil -end - - -# First option: assign it to a variable -if a = @a - # here a can't be nil -end - -# Second option: use `Object#try` found in the standard library -@a.try do |a| - # here a can't be nil -end - - -if method # first call to a method that can return Int32 or Nil - # here we know that the first call did not return Nil - method # second call can still return Int32 or Nil -end - - -class Person - def become_older(by = 1) - @age += by - end -end - -john = Person.new "John" -john.age #=> 0 - -john.become_older -john.age #=> 1 - -john.become_older 2 -john.age #=> 3 - - -john.become_older by: 5 - - -def some_method(x, y = 1, z = 2, w = 3) - # do something... -end - -some_method 10 # x = 10, y = 1, z = 2, w = 3 -some_method 10, z: 10 # x = 10, y = 1, z = 10, w = 3 -some_method 10, w: 1, y: 2, z: 3 # x = 10, y = 2, z = 3, w = 1 - - -case exp -when value1, value2 - do_something -when value3 - do_something_else -else - do_another_thing -end - - -case var -when String - # var : String - do_something -when Int32 - # var : Int32 - do_something_else -else - # here var is neither a String nor an Int32 - do_another_thing -end - - -case num -when .even? - do_something -when .odd? 
- do_something_else -end - - -case -when cond1, cond2 - do_something -when cond3 - do_something_else -end - - -a = 1 -a.responds_to?(:abs) #=> true -a.responds_to?(:size) #=> false - - -foo_or_bar = /foo|bar/ -heeello = /h(e+)llo/ -integer = /\d+/ - - -r = /foo/imx - - -slash = /\// - - -r = %r(regex with slash: /) - - -"hello world" - - -"\"" # double quote -"\\" # backslash -"\e" # escape -"\f" # form feed -"\n" # newline -"\r" # carriage return -"\t" # tab -"\v" # vertical tab - - -"\101" # == "A" -"\123" # == "S" -"\12" # == "\n" -"\1" # string with one character with code point 1 - - -"\u0041" # == "A" - - -"\u{41}" # == "A" -"\u{1F52E}" # == "🔮" - - -"hello - world" # same as "hello\n world" - - -"hello " \ -"world, " \ -"no newlines" # same as "hello world, no newlines" - - -"hello \ - world, \ - no newlines" # same as "hello world, no newlines" - - -# Supports double quotes and nested parenthesis -%(hello ("world")) # same as "hello (\"world\")" - -# Supports double quotes and nested brackets -%[hello ["world"]] # same as "hello [\"world\"]" - -# Supports double quotes and nested curlies -%{hello {"world"}} # same as "hello {\"world\"}" - -# Supports double quotes and nested angles -%> # same as "hello <\"world\">" - - -<<-XML - - - -XML - - -# Same as "Hello\n world" -<<-STRING - Hello - world - STRING - -# Same as " Hello\n world" -<<-STRING - Hello - world - STRING - - -a = 1 -b = 2 -"sum = #{a + b}" # "sum = 3" - - -1.0 # Float64 -1.0_f32 # Float32 -1_f32 # Float32 - -1e10 # Float64 -1.5e10 # Float64 -1.5e-7 # Float64 - -+1.3 # Float64 --0.5 # Float64 - - -1_000_000.111_111 # better than 1000000.111111 - - -'a' -'z' -'0' -'_' -'あ' - - -'\'' # single quote -'\\' # backslash -'\e' # escape -'\f' # form feed -'\n' # newline -'\r' # carriage return -'\t' # tab -'\v' # vertical tab - - -'\101' # == 'A' -'\123' # == 'S' -'\12' # == '\n' -'\1' # code point 1 - - -'\u0041' # == 'A' - - -'\u{41}' # == 'A' -'\u{1F52E}' # == '🔮' - - -{1 => 2, 3 => 4} # Hash(Int32, Int32) -{1 => 2, 'a' => 3} # Hash(Int32 | Char, Int32) - - -{} of Int32 => Int32 # same as Hash(Int32, Int32).new - - -{key1: 'a', key2: 'b'} # Hash(Symbol, Char) - - -{"key1": 'a', "key2": 'b'} # Hash(String, Char) - - -MyType{"foo": "bar"} - - -tmp = MyType.new -tmp["foo"] = "bar" -tmp - - -tmp = MyType(typeof("foo"), typeof("bar")).new -tmp["foo"] = "bar" -tmp - - -MyType(String, String) {"foo": "bar"} - - -:hello -:good_bye - -# With spaces and symbols -:"symbol with spaces" - -# Ending with question and exclamation marks -:question? -:exclamation! - -# For the operators -:+ -:- -:* -:/ -:== -:< -:<= -:> -:>= -:! -:!= -:=~ -:!~ -:& -:| -:^ -:~ -:** -:>> -:<< -:% -:[] -:[]? 
-:[]= -:<=> -:=== - - -x..y # an inclusive range, in mathematics: [x, y] -x...y # an exclusive range, in mathematics: [x, y) - - -# A proc without arguments -->{ 1 } # Proc(Int32) - -# A proc with one argument -->(x : Int32) { x.to_s } # Proc(Int32, String) - -# A proc with two arguments: -->(x : Int32, y : Int32) { x + y } # Proc(Int32, Int32, Int32) - - -Proc(Int32, String).new { |x| x.to_s } # Proc(Int32, String) - - -proc = ->(x : Int32, y : Int32) { x + y } -proc.call(1, 2) #=> 3 - - -def one - 1 -end - -proc = ->one -proc.call #=> 1 - - -def plus_one(x) - x + 1 -end - -proc = ->plus_one(Int32) -proc.call(41) #=> 42 - - -str = "hello" -proc = ->str.count(Char) -proc.call('e') #=> 1 -proc.call('l') #=> 2 - - -tuple = {1, "hello", 'x'} # Tuple(Int32, String, Char) -tuple[0] #=> 1 (Int32) -tuple[1] #=> "hello" (String) -tuple[2] #=> 'x' (Char) - - -[1, 2, 3] # Array(Int32) -[1, "hello", 'x'] # Array(Int32 | String | Char) - - -[] of Int32 # same as Array(Int32).new - - -%w(one two three) # ["one", "two", "three"] - - -%i(one two three) # [:one, :two, :three] - - -MyType{1, 2, 3} - - -tmp = MyType.new -tmp << 1 -tmp << 2 -tmp << 3 -tmp - - -tmp = MyType(typeof(1, 2, 3)).new -tmp << 1 -tmp << 2 -tmp << 3 -tmp - - -MyType(Int32 | String) {1, 2, "foo"} - - -nil - - -1 # Int32 - -1_i8 # Int8 -1_i16 # Int16 -1_i32 # Int32 -1_i64 # Int64 - -1_u8 # UInt8 -1_u16 # UInt16 -1_u32 # UInt32 -1_u64 # UInt64 - -+10 # Int32 --20 # Int32 - -2147483648 # Int64 -9223372036854775808 # UInt64 - - -1_000_000 # better than 1000000 - - -0b1101 # == 13 - - -0o123 # == 83 - - -0xFE012D # == 16646445 -0xfe012d # == 16646445 - - -true # A Bool that is true -false # A Bool that is false - - -a = 1 - -ptr = pointerof(a) -ptr.value = 2 - -a #=> 2 - - -class Point - def initialize(@x, @y) - end - - def x - @x - end - - def x_ptr - pointerof(@x) - end -end - -point = Point.new 1, 2 - -ptr = point.x_ptr -ptr.value = 10 - -point.x #=> 10 - - -def add(x : Number, y : Number) - x + y -end - -# Ok -add 1, 2 # Ok - -# Error: no overload matches 'add' with types Bool, Bool -add true, false - - -def add(x, y) - x + y -end - -add true, false - - -# A class that has a + method but isn't a Number -class Six - def +(other) - 6 + other - end -end - -# add method without type restrictions -def add(x, y) - x + y -end - -# OK -add Six.new, 10 - -# add method with type restrictions -def restricted_add(x : Number, y : Number) - x + y -end - -# Error: no overload matches 'restricted_add' with types Six, Int32 -restricted_add Six.new, 10 - - -class Person - def ==(other : self) - other.name == name - end - - def ==(other) - false - end -end - -john = Person.new "John" -another_john = Person.new "John" -peter = Person.new "Peter" - -john == another_john #=> true -john == peter #=> false (names differ) -john == 1 #=> false (because 1 is not a Person) - - -class Person - def self.compare(p1 : self, p2 : self) - p1.name == p2.name - end -end - -john = Person.new "John" -peter = Person.new "Peter" - -Person.compare(john, peter) # OK - - -def foo(x : Int32) -end - -foo 1 # OK -foo "hello" # Error - - -def foo(x : Int32.class) -end - -foo Int32 # OK -foo String # Error - - -def foo(x : Int32.class) - puts "Got Int32" -end - -def foo(x : String.class) - puts "Got String" -end - -foo Int32 # prints "Got Int32" -foo String # prints "Got String" - - -def foo(*args : Int32) -end - -def foo(*args : String) -end - -foo 1, 2, 3 # OK, invokes first overload -foo "a", "b", "c" # OK, invokes second overload -foo 1, 2, "hello" # Error -foo() # Error - - -def 
foo - # This is the empty-tuple case -end - - -def foo(x : T) - T -end - -foo(1) #=> Int32 -foo("hello") #=> String - - -def foo(x : Array(T)) - T -end - -foo([1, 2]) #=> Int32 -foo([1, "a"]) #=> (Int32 | String) - - -def foo(x : T.class) - Array(T) -end - -foo(Int32) #=> Array(Int32) -foo(String) #=> Array(String) - - -class Person - # Increases age by one - def become_older - @age += 1 - end - - # Increases age by the given number of years - def become_older(years : Int32) - @age += years - end - - # Increases age by the given number of years, as a String - def become_older(years : String) - @age += years.to_i - end - - # Yields the current age of this person and increases - # its age by the value returned by the block - def become_older - @age += yield @age - end -end - -person = Person.new "John" - -person.become_older -person.age #=> 1 - -person.become_older 5 -person.age #=> 6 - -person.become_older "12" -person.age #=> 18 - -person.become_older do |current_age| - current_age < 20 ? 10 : 30 -end -person.age #=> 28 - - -a = 1 -a.is_a?(Int32) #=> true -a.is_a?(String) #=> false -a.is_a?(Number) #=> true -a.is_a?(Int32 | String) #=> true - - -# One for each thread -@[ThreadLocal] -$values = [] of Int32 - - -@[AlwaysInline] -def foo - 1 -end - - -@[NoInline] -def foo - 1 -end - - -lib LibFoo - @[CallConvention("X86_StdCall")] - fun foo : Int32 -end - - -def sum(*elements) - total = 0 - elements.each do |value| - total += value - end - total -end - -# elements is Tuple(Int32, Int32, Int32, Float64) -sum 1, 2, 3, 4.5 - - -if a.responds_to?(:abs) - # here a's type will be reduced to those responding to the 'abs' method -end - - -a = some_condition ? 1 : "hello" -# a : Int32 | String - -if a.responds_to?(:abs) - # here a will be Int32, since Int32#abs exists but String#abs doesn't -else - # here a will be String -end - - -if (a = @a).responds_to?(:abs) - # here a is guaranteed to respond to `abs` -end - - -def capture(&block) - block -end - -def invoke(&block) - block.call -end - -proc = capture { puts "Hello" } -invoke(&proc) # prints "Hello" - - - - -def capture(&block) - block -end - -def twice - yield - yield -end - -proc = capture { puts "Hello" } -twice &proc - - -twice &->{ puts "Hello" } - - -def say_hello - puts "Hello" -end - -twice &->say_hello - - -def foo - yield 1 -end - -def wrap_foo - puts "Before foo" - foo do |x| - yield x - end - puts "After foo" -end - -wrap_foo do |i| - puts i -end - - -def foo - yield 1 -end - -def wrap_foo(&block : Int32 -> _) - puts "Before foo" - foo(&block) - puts "After foo" -end - -wrap_foo do |i| - puts i -end - - -foo_forward do |i| - break # error -end - - -a = 2 -while (a += 1) < 20 - if a == 10 - # goes to 'puts a' - break - end -end -puts a #=> 10 - - -class Person - private def say(message) - puts message - end - - def say_hello - say "hello" # OK, no receiver - self.say "hello" # Error, self is a receiver - - other = Person.new "Other" - other.say "hello" # Error, other is a receiver - end -end - - -class Employee < Person - def say_bye - say "bye" # OK - end -end - - -module Namespace - class Foo - protected def foo - puts "Hello" - end - end - - class Bar - def bar - # Works, because Foo and Bar are under Namespace - Foo.new.foo - end - end -end - -Namespace::Bar.new.bar - - -class Person - protected def self.say(message) - puts message - end - - def say_hello - Person.say "hello" - end -end - - -buffer = uninitialized UInt8[256] diff --git a/tests/examplefiles/test.cs b/tests/examplefiles/test.cs deleted file mode 100644 index 
faab7e42..00000000 --- a/tests/examplefiles/test.cs +++ /dev/null @@ -1,374 +0,0 @@ -//////////////////////////////////////////////////////////////////////////////// -// // -// MIT X11 license, Copyright (c) 2005-2006 by: // -// // -// Authors: // -// Michael Dominic K. // -// // -// Permission is hereby granted, free of charge, to any person obtaining a // -// copy of this software and associated documentation files (the "Software"), // -// to deal in the Software without restriction, including without limitation // -// the rights to use, copy, modify, merge, publish, distribute, sublicense, // -// and/or sell copies of the Software, and to permit persons to whom the // -// Software is furnished to do so, subject to the following conditions: // -// // -// The above copyright notice and this permission notice shall be included // -// in all copies or substantial portions of the Software. // -// // -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // -// USE OR OTHER DEALINGS IN THE SOFTWARE. // -// // -//////////////////////////////////////////////////////////////////////////////// - -namespace Diva.Core { - - using System; - using Widgets; - using System.Xml; - using Util; - using System.Collections.Generic; - using System.Collections; - using Basics; - - public class OpenerTask : Task, IBoilProvider { - - // Private structs //////////////////////////////////////////// - - struct ObjectInfo { - - public ObjectContainer Container; - public int[] Depends; - public string SystemType; - public int RefId; - - /* CONSTRUCTOR */ - public ObjectInfo (ObjectContainer container) - { - Container = container; - Depends = container.Depends.ToArray (); - SystemType = container.SystemType; - RefId = container.RefId; - } - - public override string ToString () - { - return String.Format ("Type: {0} Deps count: {1} Id: {2}", - SystemType, Depends.Length, RefId); - } - - public bool IsUnBoilable (IBoilProvider provider) - { - if (Depends.Length == 0) - return true; - - foreach (int id in Depends) - if (! 
(provider.Contains (id))) - return false; - - return true; - } - - } - - // Enums ////////////////////////////////////////////////////// - - enum OpenerTaskStep { Init, Header, ProjectInfoRead, ObjectListRead, - ObjectListParse, ObjectListUnBoil, FindRoots, - Finished }; - - // Fields ///////////////////////////////////////////////////// - - string fileName; // Filename we're reading - XmlDocument xmlDocument; // Our document - //XmlNode projectInfoNode; // node - IEnumerator objectsEnumerator; // Enumerator - List objectsList; // Objects list - ObjectListContainer objectListContainer; - OpenerTaskStep currentStep; // Our current step - - Dictionary idToObject; // Id -> object - Dictionary objectToId; // Object -> Id - - string projectName = String.Empty; - string projectDirectory = String.Empty; - TagList projectTagList; - StuffList projectStuffList; - TrackList projectTrackList; - ClipList projectClipList; - MediaItemList projectMediaItemList; - Commander projectCommander; - Gdv.Pipeline projectPipeline; - Gdv.ProjectFormat projectFormat; - - // Properties ///////////////////////////////////////////////// - - public string ProjectName { - get { return projectName; } - } - - public string ProjectDirectory { - get { return projectDirectory; } - } - - public TagList ProjectTagList { - get { return projectTagList; } - } - - public StuffList ProjectStuffList { - get { return projectStuffList; } - } - - public TrackList ProjectTrackList { - get { return projectTrackList; } - } - - public ClipList ProjectClipList { - get { return projectClipList; } - } - - public MediaItemList ProjectMediaItemList { - get { return projectMediaItemList; } - } - - public Commander ProjectCommander { - get { return projectCommander; } - } - - public Gdv.Pipeline ProjectPipeline { - get { return projectPipeline; } - } - - public Gdv.ProjectFormat ProjectFormat { - get { return projectFormat; } - } - - // Public methods ///////////////////////////////////////////// - - /* CONSTRUCTOR */ - public OpenerTask (string fileName) - { - this.fileName = fileName; - var verbatimString = @"c:\test\"; - - var verbatimStringWithNewline = @"test \\ \n \t \r -a -b -c"; - var verbatimStringWithEscapedQuotes = @"He said -""she says \"" is not an escaped character in verbatimstrings"" -"; - - int[] numbers = { 5,6,4,2,4,6,8,9,7,0 }; - var linqExample = from n in numbers - where n > 5 - select n; - - var anotherlinqExample = from n in numbers - orderby n descending - select n; - - int[] someMoreNumbers = { 8,2,17,34,8,9,9,5,3,4,2,1,5 }; - var moreLinq = from n in numbers - join mn in moreNumbers on n equals mn + 2 - select new {n, mn}; - } - - public override void Reset () - { - objectToId = new Dictionary (); - idToObject = new Dictionary (); - - xmlDocument = null; - //projectInfoNode = null; - - currentStep = OpenerTaskStep.Init; - - base.Reset (); - } - - public int GetIdForObject (object o) - { - return objectToId [o]; - } - - public object GetObjectForId (int id) - { - return idToObject [id]; - } - - public bool Contains (int id) - { - return idToObject.ContainsKey (id); - } - - // Private methods //////////////////////////////////////////// - - protected override TaskStatus ExecuteStep (int s) - { - bool cont = true; - - // Main - switch (currentStep) { - - case OpenerTaskStep.Init: - objectsList = new List (); - xmlDocument = new XmlDocument (); - xmlDocument.Load (fileName); - currentStep = OpenerTaskStep.Header; - break; - - case OpenerTaskStep.Header: - //ReadHeader (); - currentStep = OpenerTaskStep.ProjectInfoRead; - 
break; - - case OpenerTaskStep.ProjectInfoRead: - foreach (XmlNode node in xmlDocument.DocumentElement.ChildNodes) - if (node.Name == "projectinfo") - ResolveProjectInfoNode (node); - - // FIXME: Fail if not found/not resolved - currentStep = OpenerTaskStep.ObjectListRead; - break; - - case OpenerTaskStep.ObjectListRead: - foreach (XmlNode node in xmlDocument.DocumentElement.ChildNodes) - if (node.Name == "objectlist") - objectListContainer = (ObjectListContainer) - DataFactory.MakeDataElement (node as XmlElement); - - if (objectListContainer == null) - throw new Exception ("ObjectListContainer not found!"); - - currentStep = OpenerTaskStep.ObjectListParse; - break; - - case OpenerTaskStep.ObjectListParse: - bool flush = EnumerateSomeObjects (); - if (flush) - currentStep = OpenerTaskStep.ObjectListUnBoil; - break; - - case OpenerTaskStep.ObjectListUnBoil: - bool done = UnBoilSomeObjects (); - if (done) - currentStep = OpenerTaskStep.FindRoots; - break; - - - case OpenerTaskStep.FindRoots: - projectTrackList = (TrackList) FindRoot ("tracklist"); - projectTagList = (TagList) FindRoot ("taglist"); - projectStuffList = (StuffList) FindRoot ("stufflist"); - projectClipList = (ClipList) FindRoot ("cliplist"); - projectMediaItemList = (MediaItemList) FindRoot ("mediaitemlist"); - projectPipeline = (Gdv.Pipeline) FindRoot ("pipeline"); - projectCommander = (Commander) FindRoot ("commander"); - projectFormat = (Gdv.ProjectFormat) FindRoot ("projectformat"); - - currentStep = OpenerTaskStep.Finished; - break; - - case OpenerTaskStep.Finished: - cont = false; - break; - - default: - break; - } - - // Post - if (cont) - return TaskStatus.Running; - else - return TaskStatus.Done; - } - - /* - void ReadHeader () - { - // FIXME: Read all the attributes from the element - }*/ - - void ResolveProjectInfoNode (XmlNode node) - { - foreach (XmlNode childNode in node) { - - switch (childNode.Name) { - - case "name": - projectName = childNode.FirstChild.Value; - break; - - case "directory": - projectDirectory = childNode.FirstChild.Value; - break; - - // FIXME: Duration etc. - } - } - } - - bool EnumerateSomeObjects () - { - if (objectsEnumerator == null) - objectsEnumerator = objectListContainer.FindAllObjects ().GetEnumerator (); - - for (int i = 0; i < 10; i++) { - if (objectsEnumerator.MoveNext () == false) - return true; - - ObjectContainer container = (ObjectContainer) - objectsEnumerator.Current; - - ObjectInfo newInfo = new ObjectInfo (container); - objectsList.Add (newInfo); - } - - return false; - } - - ObjectInfo GetNextCandidate () - { - foreach (ObjectInfo objInfo in objectsList) - if (objInfo.IsUnBoilable (this)) - return objInfo; - - throw new Exception ("FIXME: No more unboilable objects found. 
Recursive?"); - } - - bool UnBoilSomeObjects () - { - for (int i = 0; i < 5; i++) { - // All unboiled - if (objectsList.Count == 0) - return true; - - ObjectInfo objInfo = GetNextCandidate (); - - object o = BoilFactory.UnBoil (objInfo.Container, this); - objectsList.Remove (objInfo); - - // Add - idToObject [objInfo.RefId] = o; - objectToId [o] = objInfo.RefId; - - } - - return false; - } - - object FindRoot (string rootString) - { - ObjectContainer container = objectListContainer.FindObjectContainer (rootString); - return idToObject [container.RefId]; - } - - } - -} diff --git a/tests/examplefiles/test.csd b/tests/examplefiles/test.csd deleted file mode 100644 index 6512d99e..00000000 --- a/tests/examplefiles/test.csd +++ /dev/null @@ -1,18 +0,0 @@ -/* - * comment - */ -; comment -// comment -/ - - -0dbfs = 1 -prints "hello, world\n" - - -i 1 0 0 - - - - - diff --git a/tests/examplefiles/test.css b/tests/examplefiles/test.css deleted file mode 100644 index 3f9ffb20..00000000 --- a/tests/examplefiles/test.css +++ /dev/null @@ -1,54 +0,0 @@ -body { - font-size: 12pt; - background: #fff url(temp.png) top left no-repeat; -} - -* html body { - font-size: 14pt; -} - -#nav .new { - display: block; - -webkit-border-radius: 5px; - -moz-border-radius: 5px; - -ms-border-radius: 5px; - -o-border-radius: 5px; - border-radius: 5px; -} - -ul#nav li.new { - font-weight: bold; -} - -:link { - color: #f00; -} - -:link:hover { - color: #0f0; -} - -@media screen { - body { - background: #ccc; - } -} - -@namespace "http://www.w3.org/1999/xhtml"; - -@import url("mystyle.css"); - -@charset "ISO-8859-1"; - -@font-face { font-family: "Example Font"; src: url("http://www.example.com/fonts/example"); } - -@media screen { body { font-size: 16px } } @media print { body { font-size: 12pt } } - - -@page { body { margin: 1in 1.5in; } } - -@page linke-seite:left { body { margin:20mm; margin-right:25mm; } } - -@-moz-document url-prefix(http://pygments.org) { a {font-style: normal !important;} } - - diff --git a/tests/examplefiles/test.cu b/tests/examplefiles/test.cu deleted file mode 100644 index 19f66802..00000000 --- a/tests/examplefiles/test.cu +++ /dev/null @@ -1,36 +0,0 @@ -#include - -// __device__ function -__device__ void func() -{ - short* array0 = (short*)array; - float* array1 = (float*)&array0[127]; -} - -/* __global__ function */ -__global__ static void reduction(const float* __restrict__ input, float *output, clock_t *timer) -{ - // __shared__ float shared[2 * blockDim.x]; - extern __shared__ float shared[]; - - const int tid = threadIdx.x; - const int bid = blockIdx.x; - - if (threadIdx.x == 0) { - __threadfence(); - } - - // Perform reduction to find minimum. 
- for (int d = blockDim.x; d > 0; d /= 2) - { - __syncthreads(); - } -} - -int main(int argc, char **argv) -{ - dim3 dimBlock(8, 8, 1); - - timedReduction<<>>(dinput, doutput, dtimer); - cudaDeviceReset(); -} diff --git a/tests/examplefiles/test.cyp b/tests/examplefiles/test.cyp deleted file mode 100644 index 37465a4d..00000000 --- a/tests/examplefiles/test.cyp +++ /dev/null @@ -1,123 +0,0 @@ -//test comment -START a = node(*) -MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) -RETURN a.name, m.title, d.name; - -START a = node(*) -MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) -WITH d,m,count(a) as Actors -WHERE Actors > 4 -RETURN d.name as Director,m.title as Movie, Actors ORDER BY Actors; - -START a=node(*) -MATCH p=(a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) -return p; - -START a = node(*) -MATCH p1=(a)-[:ACTED_IN]->(m), p2=d-[:DIRECTED]->(m) -WHERE m.title="The Matrix" -RETURN p1, p2; - -START a = node(*) -MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) -WHERE a=d -RETURN a.name; - -START a = node(*) -MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) -WHERE a=d -RETURN a.name; - -START a=node(*) -MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d) -RETURN a.name, d.name, count(*) as Movies,collect(m.title) as Titles -ORDER BY (Movies) DESC -LIMIT 5; - -START keanu=node:node_auto_index(name="Keanu Reeves") -RETURN keanu; - -START keanu=node:node_auto_index(name="Keanu Reeves") -MATCH (keanu)-[:ACTED_IN]->(movie) -RETURN movie.title; - -START keanu=node:node_auto_index(name="Keanu Reeves") -MATCH (keanu)-[r:ACTED_IN]->(movie) -WHERE "Neo" in r.roles -RETURN DISTINCT movie.title; - -START keanu=node:node_auto_index(name="Keanu Reeves") -MATCH (keanu)-[:ACTED_IN]->()<-[:DIRECTED]-(director) -RETURN director.name; - -START keanu=node:node_auto_index(name="Keanu Reeves") -MATCH (keanu)-[:ACTED_IN]->(movie)<-[:ACTED_IN]-(n) -WHERE n.born < keanu.born -RETURN DISTINCT n.name, keanu.born ,n.born; - -START keanu=node:node_auto_index(name="Keanu Reeves"), - hugo=node:node_auto_index(name="Hugo Weaving") -MATCH (keanu)-[:ACTED_IN]->(movie) -WHERE NOT((hugo)-[:ACTED_IN]->(movie)) -RETURN DISTINCT movie.title; - -START a = node(*) -MATCH (a)-[:ACTED_IN]->(m) -WITH a,count(m) as Movies -RETURN a.name as Actor, Movies ORDER BY Movies; - -START keanu=node:node_auto_index(name="Keanu Reeves"),actor -MATCH past=(keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(), - actors=(actor)-[:ACTED_IN]->() -WHERE hasnt=actors NOT IN past -RETURN hasnt; - -START keanu=node:node_auto_index(name="Keanu Reeves") -MATCH (keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(c), - (c)-[:ACTED_IN]->()<-[:ACTED_IN]-(coc) -WHERE NOT((keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(coc)) -AND coc > keanu -RETURN coc.name, count(coc) -ORDER BY count(coc) DESC -LIMIT 3; - -START kevin=node:node_auto_index(name="Kevin Bacon"), - movie=node:node_auto_index(name="Mystic River") -MATCH (kevin)-[:ACTED_IN]->(movie) -RETURN DISTINCT movie.title; - -CREATE (n - { - title:"Mystic River", - released:1993, - tagline:"We bury our sins here, Dave. We wash them clean." 
- } - ) RETURN n; - - -START movie=node:node_auto_index(title="Mystic River") -SET movie.released = 2003 -RETURN movie; - -start emil=node:node_auto_index(name="Emil Eifrem") MATCH emil-[r]->(n) DELETE r, emil; - -START a=node(*) -MATCH (a)-[:ACTED_IN]->()<-[:ACTED_IN]-(b) -CREATE UNIQUE (a)-[:KNOWS]->(b); - -START keanu=node:node_auto_index(name="Keanu Reeves") -MATCH (keanu)-[:KNOWS*2]->(fof) -WHERE keanu <> fof -RETURN distinct fof.name; - -START charlize=node:node_auto_index(name="Charlize Theron"), - bacon=node:node_auto_index(name="Kevin Bacon") -MATCH p=shortestPath((charlize)-[:KNOWS*]->(bacon)) -RETURN extract(n in nodes(p) | n.name)[1]; - -START actors=node: - -MATCH (alice)-[:`REALLY LIKES`]->(bob) -MATCH (alice)-[:`REALLY ``LIKES```]->(bob) -myFancyIdentifier.`(weird property name)` -"string\t\n\b\f\\\''\"" diff --git a/tests/examplefiles/test.d b/tests/examplefiles/test.d deleted file mode 100644 index 02fe8f73..00000000 --- a/tests/examplefiles/test.d +++ /dev/null @@ -1,135 +0,0 @@ -// Created by Lionello Lunesu and placed in the public domain. -// This file has been modified from its original version. -// It has been formatted to fit your screen. -module phoneno; // optional -import std.stdio; // writefln -import std.ctype; // isdigit -import std.stream; // BufferedFile - -// Just for readability (imagine char[][][char[]]) -alias char[] string; -alias string[] stringarray; - -/// Strips non-digit characters from the string (COW) -string stripNonDigit( in string line ) -{ - string ret; - foreach(uint i, c; line) { - // Error: std.ctype.isdigit at C:\dmd\src\phobos\std\ctype.d(37) - // conflicts with std.stream.isdigit at C:\dmd\src\phobos\std\stream.d(2924) - if (!std.ctype.isdigit(c)) { - if (!ret) - ret = line[0..i]; - } - else if (ret) - ret ~= c; - } - return ret?ret:line; -} - -unittest { - assert( stripNonDigit("asdf") == "" ); - assert( stripNonDigit("\'13-=2 4kop") == "1324" ); -} - -/// Converts a word into a number, ignoring all non alpha characters -string wordToNum( in string word ) -{ -// translation table for the task at hand -const char[256] TRANSLATE = - " " // 0 - " 0123456789 " // 32 - " 57630499617851881234762239 " // 64 - " 57630499617851881234762239 " - " " - " " - " " - " "; - string ret; - foreach(c; cast(ubyte[])word) - if (TRANSLATE[c] != ' ') - ret ~= TRANSLATE[c]; - return ret; -} - -unittest { - // Test wordToNum using the table from the task description. - assert( "01112223334455666777888999" == - wordToNum("E | J N Q | R W X | D S Y | F T | A M | C I V | B K U | L O P | G H Z")); - assert( "01112223334455666777888999" == - wordToNum("e | j n q | r w x | d s y | f t | a m | c i v | b k u | l o p | g h z")); - assert( "0123456789" == - wordToNum("0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9")); -} - -void main( string[] args ) -{ - // This associative array maps a number to an array of words. 
- stringarray[string] num2words; - - foreach(string word; new BufferedFile("dictionary.txt" ) ) - num2words[ wordToNum(word) ] ~= word.dup; // must dup - - /// Finds all alternatives for the given number - /// (should have been stripped from non-digit characters) - stringarray _FindWords( string numbers, bool digitok ) - in { - assert(numbers.length > 0); - } - out(result) { - foreach (a; result) - assert( wordToNum(a) == numbers ); - } - body { - stringarray ret; - bool foundword = false; - for (uint t=1; t<=numbers.length; ++t) { - auto alternatives = numbers[0..t] in num2words; - if (!alternatives) - continue; - foundword = true; - if (numbers.length > t) { - // Combine all current alternatives with all alternatives - // of the rest (next piece can start with a digit) - foreach (a2; _FindWords( numbers[t..$], true ) ) - foreach(a1; *alternatives) - ret ~= a1 ~ " " ~ a2; - } - else - ret ~= *alternatives; // append these alternatives - } - // Try to keep 1 digit, only if we're allowed and no other - // alternatives were found - // Testing "ret.length" makes more sense than testing "foundword", - // but the other implementations seem to do just this. - if (digitok && !foundword) { //ret.length == 0 - if(numbers.length > 1) { - // Combine 1 digit with all altenatives from the rest - // (next piece can not start with a digit) - foreach (a; _FindWords( numbers[1..$], false ) ) - ret ~= numbers[0..1] ~ " " ~ a; - } - else - ret ~= numbers[0..1]; // just append this digit - } - return ret; - } - - /// (This function was inlined in the original program) - /// Finds all alternatives for the given phone number - /// Returns: array of strings - stringarray FindWords( string phone_number ) - { - if (!phone_number.length) - return null; - // Strip the non-digit characters from the phone number, and - // pass it to the recursive function (leading digit is allowed) - return _FindWords( stripNonDigit(phone_number), true ); - } - - // Read the phone numbers - foreach(string phone; new BufferedFile("input.txt" ) ) - foreach(alternative; FindWords( phone ) ) - writefln(phone, ": ", alternative ); -} - diff --git a/tests/examplefiles/test.dart b/tests/examplefiles/test.dart deleted file mode 100644 index aa1fb0ed..00000000 --- a/tests/examplefiles/test.dart +++ /dev/null @@ -1,23 +0,0 @@ -// Greeter example from -// -class Greeter implements Comparable { - String prefix = 'Hello,'; - Greeter() {} - Greeter.withPrefix(this.prefix); - greet(String name) => print('$prefix $name'); - - int compareTo(Greeter other) => prefix.compareTo(other.prefix); -} - -void main() { - Greeter greeter = new Greeter(); - Greeter greeter2 = new Greeter.withPrefix('Hi,'); - - num result = greeter2.compareTo(greeter); - if (result == 0) { - greeter2.greet('you are the same.'); - } else { - greeter2.greet('you are different.'); - } -} - diff --git a/tests/examplefiles/test.dtd b/tests/examplefiles/test.dtd deleted file mode 100644 index 639b411a..00000000 --- a/tests/examplefiles/test.dtd +++ /dev/null @@ -1,89 +0,0 @@ - - - - - - -]> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -]]> - -]]> - -]> - - diff --git a/tests/examplefiles/test.ebnf b/tests/examplefiles/test.ebnf deleted file mode 100644 index a96171b0..00000000 --- a/tests/examplefiles/test.ebnf +++ /dev/null @@ -1,31 +0,0 @@ -letter = "A" | "B" | "C" | "D" | "E" | "F" | "G" - | "H" | "I" | "J" | "K" | "L" | "M" | "N" - | "O" | "P" | "Q" | "R" | "S" | "T" | "U" - | "V" | "W" | "X" | "Y" | "Z" ; -digit = 
"0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ; -symbol = "[" | "]" | "{" | "}" | "(" | ")" | "<" | ">" - | "'" | '"' | "=" | "|" | "." | "," | ";" ; -character = letter | digit | symbol | " " ; - -identifier = letter , { letter | digit | " " } ; -terminal = "'" , character , { character } , "'" - | '"' , character , { character } , '"' ; - -special = "?" , any , "?" ; - -comment = (* this is a comment "" *) "(*" , any-symbol , "*)" ; -any-symbol = ? any visible character ? ; (* ? ... ? *) - -lhs = identifier ; -rhs = identifier - | terminal - | comment , rhs - | rhs , comment - | "[" , rhs , "]" - | "{" , rhs , "}" - | "(" , rhs , ")" - | rhs , "|" , rhs - | rhs , "," , rhs ; - -rule = lhs , "=" , rhs , ";" | comment ; -grammar = { rule } ; diff --git a/tests/examplefiles/test.ec b/tests/examplefiles/test.ec deleted file mode 100644 index 37868b52..00000000 --- a/tests/examplefiles/test.ec +++ /dev/null @@ -1,605 +0,0 @@ -namespace gui; - -import "Window" - -public struct AnchorValue -{ - AnchorValueType type; - - union - { - int distance; - float percent; - }; - property int - { - set { distance = value; type = offset; } - get { return distance; } - } - property double - { - set { percent = (float) value; type = relative; } - get { return (double) percent; } - } - - char * OnGetString(char * stringOutput, void * fieldData, bool * needClass) - { - if(type == offset) - { - sprintf(stringOutput, "%d", distance); - } - else if(type == relative) - { - int c; - int last = 0; - sprintf(stringOutput, "%f", percent); - c = strlen(stringOutput)-1; - for( ; c >= 0; c--) - { - if(stringOutput[c] != '0') - last = Max(last, c); - if(stringOutput[c] == '.') - { - if(last == c) - { - stringOutput[c+1] = '0'; - stringOutput[c+2] = 0; - } - else - stringOutput[last+1] = 0; - break; - } - } - } - if(needClass) *needClass = false; - return stringOutput; - } - - bool OnGetDataFromString(char * stringOutput) - { - char * end; - if(strchr(stringOutput, '.')) - { - float percent = (float)strtod(stringOutput, &end); - - if(end != stringOutput) - { - this.percent = percent; - type = relative; - return true; - } - } - else if(stringOutput[0]) - { - int distance = strtol(stringOutput, &end, 0); - if(end != stringOutput) - { - this.distance = distance; - type = offset; - return true; - } - } - else - { - distance = 0; - type = 0; - } - return false; - } -}; - -public struct MiddleAnchorValue -{ - AnchorValueType type; - - union - { - int distance; - float percent; - }; - property int - { - set { distance = value; type = none; } - get { return distance; } - } - property double - { - set { percent = (float) value; type = middleRelative; } - get { return (double) percent; } - } - - char * OnGetString(char * stringOutput, void * fieldData, bool * needClass) - { - if(type == middleRelative) - { - int c; - int last = 0; - sprintf(stringOutput, "%f", percent); - c = strlen(stringOutput)-1; - for( ; c >= 0; c--) - { - if(stringOutput[c] != '0') - last = Max(last, c); - if(stringOutput[c] == '.') - { - if(last == c) - { - stringOutput[c+1] = '0'; - stringOutput[c+2] = 0; - } - else - stringOutput[last+1] = 0; - break; - } - } - } - else if(type == none && distance) - { - sprintf(stringOutput, "%d", distance); - } - if(needClass) *needClass = false; - return stringOutput; - } - - bool OnGetDataFromString(char * stringOutput) - { - if(strchr(stringOutput, '.')) - { - percent = (float)strtod(stringOutput, null); - type = middleRelative; - } - else - { - distance = strtol(stringOutput, null, 0); - type = none; - } - 
return true; - } -}; - -public enum AnchorValueType { none, offset, relative, middleRelative, cascade, vTiled, hTiled }; - -public struct Anchor -{ - union { AnchorValue left; MiddleAnchorValue horz; }; - union { AnchorValue top; MiddleAnchorValue vert; }; - AnchorValue right, bottom; - - char * OnGetString(char * stringOutput, void * fieldData, bool * needClass) - { - char tempString[256]; - char * anchorValue; - bool subNeedClass; - - tempString[0] = '\0'; - anchorValue = left.OnGetString(tempString, null, &subNeedClass); - if(anchorValue[0]) { if(stringOutput[0]) strcat(stringOutput, ", "); strcat(stringOutput, "left = "); strcat(stringOutput, anchorValue); } - - //if(((!left.type && !right.type) && horz.distance) || horz.type == middleRelative) - if(!right.type && ((!left.type && horz.distance) || horz.type == middleRelative)) - { - tempString[0] = '\0'; - anchorValue = horz.OnGetString(tempString, null, &subNeedClass); - if(anchorValue[0]) { if(stringOutput[0]) strcat(stringOutput, ", "); strcat(stringOutput, "horz = "); strcat(stringOutput, anchorValue); } - } - - tempString[0] = '\0'; - anchorValue = top.OnGetString(tempString, null, &subNeedClass); - if(anchorValue[0]) { if(stringOutput[0]) strcat(stringOutput, ", "); strcat(stringOutput, "top = "); strcat(stringOutput, anchorValue); } - - tempString[0] = '\0'; - anchorValue = right.OnGetString(tempString, null, &subNeedClass); - if(anchorValue[0]) { if(stringOutput[0]) strcat(stringOutput, ", "); strcat(stringOutput, "right = "); strcat(stringOutput, anchorValue); } - - // if(((!top.type && !bottom.type) && vert.distance) || vert.type == middleRelative) - if(!bottom.type && ((!top.type && vert.distance) || vert.type == middleRelative)) - { - tempString[0] = '\0'; - anchorValue = vert.OnGetString(tempString, null, &subNeedClass); - if(anchorValue[0]) { if(stringOutput[0]) strcat(stringOutput, ", "); strcat(stringOutput, "vert = "); strcat(stringOutput, anchorValue); } - } - - tempString[0] = '\0'; - anchorValue = bottom.OnGetString(tempString, null, &subNeedClass); - if(anchorValue[0]) { if(stringOutput[0]) strcat(stringOutput, ", "); strcat(stringOutput, "bottom = "); strcat(stringOutput, anchorValue); } - - return stringOutput; - } - - bool OnGetDataFromString(char * string) - { - this = Anchor {}; - return class::OnGetDataFromString(string); - } - - bool OnSaveEdit(DropBox dropBox, void * object) - { - return dropBox.Save(); - } - - Window OnEdit(Window listBox, Window master, int x, int y, int w, int h, Window control) - { - char * string = ""; - AnchorDropBox comboBox - { - editText = true; - parent = listBox; - master = master; - position = Point { x, y }; - //clientSize = Size { h = h }; - //size.w = w; - size = { w, h }; - anchorValue = this; - control = control; - borderStyle = 0; - }; - - comboBox.Create(); - - { - char tempString[MAX_F_STRING] = ""; - bool needClass = false; - char * result = OnGetString(tempString, null, &needClass); - if(result) string = result; - } - comboBox.contents = string; - return comboBox; - } -}; - -private class AnchorButton : Button -{ - toggle = true, bevel = false; - - void OnRedraw(Surface surface) - { - int cw = clientSize.w; - int ch = clientSize.h; - - surface.SetForeground(black); - if(checked) - { - surface.SetBackground(Color { 85,85,85 }); - surface.Area(0,0, cw-1, ch-1); - } - else - surface.LineStipple(0xAAAA); - - surface.Rectangle(0,0,cw-1,ch-1); - - if(active) - { - surface.LineStipple(0xAAAA); - surface.Rectangle(2,2,cw-3,ch-3); - } - } - - bool 
AnchorEditor::NotifyClicked(Button button, int x, int y, Modifiers mods) - { - AnchorDropBox anchorDropBox = (AnchorDropBox)master; - Anchor anchor = anchorDropBox.anchorValue; - Window control = anchorDropBox.control; - DataBox dropMaster = (DataBox)anchorDropBox.master; - int id = button.id; - - switch(id) - { - case 0: anchor.left.type = button.checked ? offset : none; break; - case 1: anchor.top.type = button.checked ? offset : none; break; - case 2: anchor.right.type = button.checked ? offset : none; break; - case 3: anchor.bottom.type = button.checked ? offset : none; break; - } - - if(anchor.horz.type == middleRelative && (id == 0 || id == 2)) - { - anchorDropBox.relButtons[0].checked = false; - anchorDropBox.relButtons[2].checked = false; - } - if(anchor.vert.type == middleRelative && (id == 1 || id == 3)) - { - anchorDropBox.relButtons[1].checked = false; - anchorDropBox.relButtons[3].checked = false; - } - anchorDropBox.relButtons[id].checked = false; - - //anchor.horz.type = none; - //anchor.vert.type = none; - - { - int vpw, vph; - int x,y,w,h; - Window parent = control.parent; - - // Fix Anchor - x = control.position.x; - y = control.position.y; - w = control.size.w; - h = control.size.h; - - vpw = parent.clientSize.w; - vph = parent.clientSize.h; - if(control.nonClient) - { - vpw = parent.size.w; - vph = parent.size.h; - } - else if(((BorderBits)control.borderStyle).fixed) - { - if(!control.dontScrollHorz && parent.scrollArea.w) vpw = parent.scrollArea.w; - if(!control.dontScrollVert && parent.scrollArea.h) vph = parent.scrollArea.h; - } - - if(anchor.left.type == offset) anchor.left.distance = x; - else if(anchor.left.type == relative) anchor.left.percent = (float)x / vpw; - if(anchor.top.type == offset) anchor.top.distance = y; - else if(anchor.top.type == relative) anchor.top.percent = (float)y / vph; - if(anchor.right.type == offset) anchor.right.distance = vpw - (x + w); - //else if(anchor.right.type == relative) anchor.right.percent = (float) (x + w) / vpw; - else if(anchor.right.type == relative) anchor.right.percent = (float) (vpw - (x + w)) / vpw; - if(anchor.bottom.type == offset) anchor.bottom.distance = vph - (y + h); - //else if(anchor.bottom.type == relative) anchor.bottom.percent = (float) (y + h) / vph; - else if(anchor.bottom.type == relative) anchor.bottom.percent = (float) (vph - (y + h)) / vph; - - if(!anchor.left.type && !anchor.right.type) - { - anchor.horz.distance = (x + w / 2) - (vpw / 2); - //anchor.horz.type = anchor.horz.distance ? offset : 0; - } - else if(anchor.horz.type == middleRelative) anchor.horz.percent = (float) ((x + w / 2) - (vpw / 2)) / vpw; - if(!anchor.top.type && !anchor.bottom.type) - { - anchor.vert.distance = (y + h / 2) - (vph / 2); - //anchor.vert.type = anchor.vert.distance ? 
offset : 0; - } - else if(anchor.vert.type == middleRelative) anchor.vert.percent = (float)((y + h / 2) - (vph / 2)) / vph; - } - - { - char tempString[1024] = ""; - bool needClass = false; - char * string = anchor.OnGetString(tempString, null, &needClass); - anchorDropBox.contents = string; - } - - dropMaster.SetData(&anchor, false); - anchorDropBox.anchorValue = anchor; - return true; - } -} - -private class AnchorRelButton : Button -{ - toggle = true; - bevel = false; - text = "%"; - //bevelOver = true; - - void OnRedraw(Surface surface) - { - int cw = clientSize.w; - int ch = clientSize.h; - - if(checked) - { - surface.SetForeground(black); - } - else - { - surface.SetForeground(Color{170,170,170}); - } - surface.WriteText(5,2, "%", 1); - - if(active) - { - surface.LineStipple(0xAAAA); - surface.Rectangle(3,3,cw-4,ch-4); - } - } - - bool AnchorEditor::NotifyClicked(Button button, int x, int y, Modifiers mods) - { - AnchorDropBox anchorDropBox = (AnchorDropBox)master; - Anchor anchor = anchorDropBox.anchorValue; - Window control = anchorDropBox.control; - DataBox dropMaster = (DataBox)anchorDropBox.master; - int id = button.id; - - if((id == 0 || id == 2) && ((!anchor.left.type && !anchor.right.type) || anchor.left.type == middleRelative)) - { - if(button.checked) anchor.horz.type = middleRelative; else anchor.horz.type = none; - anchorDropBox.relButtons[(id + 2)%4].checked = button.checked; - } - else if((id == 1 || id == 3) && ((!anchor.top.type && !anchor.bottom.type) || anchor.top.type == middleRelative)) - { - if(button.checked) anchor.vert.type = middleRelative; else anchor.vert.type = none; - anchorDropBox.relButtons[(id + 2)%4].checked = button.checked; - } - else - { - switch(id) - { - case 0: anchor.left.type = button.checked ? relative : (anchor.left.type ? offset : none); break; - case 1: anchor.top.type = button.checked ? relative : (anchor.top.type ? offset : none); break; - case 2: anchor.right.type = button.checked ? relative : (anchor.right.type ? offset : none); break; - case 3: anchor.bottom.type = button.checked ? relative : (anchor.bottom.type ? 
offset : none); break; - } - anchorDropBox.buttons[id].checked = true; - if(anchor.horz.type == middleRelative) anchor.horz.type = none; - if(anchor.vert.type == middleRelative) anchor.vert.type = none; - } - - { - int vpw, vph; - int x,y,w,h; - Window parent = control.parent; - - // Fix Anchor - x = control.position.x; - y = control.position.y; - w = control.size.w; - h = control.size.h; - - vpw = parent.clientSize.w; - vph = parent.clientSize.h; - if(control.nonClient) - { - vpw = parent.size.w; - vph = parent.size.h; - } - else if(((BorderBits)control.borderStyle).fixed) - { - if(!control.dontScrollHorz && parent.scrollArea.w) vpw = parent.scrollArea.w; - if(!control.dontScrollVert && parent.scrollArea.h) vph = parent.scrollArea.h; - } - - if(anchor.left.type == offset) anchor.left.distance = x; - else if(anchor.left.type == relative) anchor.left.percent = (float)x / vpw; - if(anchor.top.type == offset) anchor.top.distance = y; - else if(anchor.top.type == relative) anchor.top.percent = (float)y / vph; - if(anchor.right.type == offset) anchor.right.distance = vpw - (x + w); - //else if(anchor.right.type == relative) anchor.right.percent = (float) (x + w) / vpw; - else if(anchor.right.type == relative) anchor.right.percent = (float) (vpw - (x + w)) / vpw; - if(anchor.bottom.type == offset) anchor.bottom.distance = vph - (y + h); - //else if(anchor.bottom.type == relative) anchor.bottom.percent = (float) (y + h) / vph; - else if(anchor.bottom.type == relative) anchor.bottom.percent = (float) (vph - (y + h)) / vph; - - if(!anchor.left.type && !anchor.right.type) - { - anchor.horz.distance = (x + w / 2) - (vpw / 2); - //anchor.horz.type = anchor.horz.distance ? offset : none; - } - else if(anchor.horz.type == middleRelative) anchor.horz.percent = (float) ((x + w / 2) - (vpw / 2)) / vpw; - if(!anchor.top.type && !anchor.bottom.type) - { - anchor.vert.distance = (y + h / 2) - (vph / 2); - //anchor.vert.type = anchor.vert.distance ? 
offset : none; - } - else if(anchor.vert.type == middleRelative) anchor.vert.percent = (float)((y + h / 2) - (vph / 2)) / vph; - } - - { - char tempString[1024] = ""; - bool needClass = false; - char * string = anchor.OnGetString(tempString, null, &needClass); - anchorDropBox.contents = string; - } - - dropMaster.SetData(&anchor, false); - anchorDropBox.anchorValue = anchor; - return true; - } -} - -private class AnchorEditor : Window -{ - interim = true; - borderStyle = deepContour; - size.h = 92; - - bool OnKeyDown(Key key, unichar ch) - { - if(key == escape) - return master.OnKeyDown(key, ch); - return true; - } -} - -private class AnchorDropBox : DropBox -{ - Anchor anchorValue; - Window control; - Button relButtons[4], buttons[4]; - - AnchorEditor anchorEditor - { - master = this; - autoCreate = false; - }; - - Window OnDropDown() - { - int c; - Button - { - anchorEditor, - anchor = Anchor { left = 28, top = 28, right = 28, bottom = 28 }, - inactive = true, disabled = true - }; - for(c = 0; c<4; c++) - { - Button button = buttons[c] = AnchorButton - { - anchorEditor, id = c, - size = Size { (c%2)?10:28, (c%2)?28:10 } - }; - Button relButton = relButtons[c] = AnchorRelButton - { - anchorEditor, id = c; - }; - - switch(c) - { - case 0: - if(anchorValue.left.type && anchorValue.left.type != middleRelative) button.checked = true; - if(anchorValue.left.type == relative || anchorValue.horz.type == middleRelative) relButton.checked = true; - - button.anchor = Anchor { left = 0 }; - relButton.anchor = Anchor { left = 5, vert = 16 }; - break; - case 1: - if(anchorValue.top.type && anchorValue.top.type != middleRelative) button.checked = true; - if(anchorValue.top.type == relative || anchorValue.vert.type == middleRelative) relButton.checked = true; - - button.anchor = Anchor { top = 0 }; - relButton.anchor = Anchor { top = 5, horz = 16 }; - break; - case 2: - if(anchorValue.right.type && anchorValue.right.type != middleRelative) button.checked = true; - if(anchorValue.right.type == relative || anchorValue.horz.type == middleRelative) relButton.checked = true; - - button.anchor = Anchor { right = 0 }; - relButton.anchor = Anchor { right = 5, vert = 16 }; - break; - case 3: - if(anchorValue.bottom.type && anchorValue.bottom.type != middleRelative) button.checked = true; - if(anchorValue.bottom.type == relative || anchorValue.vert.type == middleRelative) relButton.checked = true; - - button.anchor = Anchor { bottom = 0 }; - relButton.anchor = Anchor { bottom = 5, horz = 16 }; - break; - } - } - anchorEditor.Create(); - return anchorEditor; - } - - void OnCloseDropDown(Window anchorEditor) - { - // TOFIX: Patch for update bug - master.Update(null); - anchorEditor.Destroy(0); - } - - bool DataBox::NotifyTextEntry(AnchorDropBox dropBox, char * string, bool save) - { - Anchor anchor = dropBox.anchorValue; - Window control = dropBox.control; - - if(save) - { - if(anchor.OnGetDataFromString(string)) - { - SetData(&anchor, false); - dropBox.anchorValue = anchor; - } - } - else - { - char tempString[1024] = ""; - bool needClass = false; - char * string = anchor.OnGetString(tempString, null, &needClass); - dropBox.contents = string; - } - return true; - } -} diff --git a/tests/examplefiles/test.eh b/tests/examplefiles/test.eh deleted file mode 100644 index 1ed173fb..00000000 --- a/tests/examplefiles/test.eh +++ /dev/null @@ -1,315 +0,0 @@ -/* A Bison parser, made by GNU Bison 2.0. 
*/ - -/* Skeleton parser for Yacc-like parsing with Bison, - Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2, or (at your option) - any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place - Suite 330, - Boston, MA 02111-1307, USA. */ - -/* As a special exception, when this file is copied by Bison into a - Bison output file, you may use that output file without restriction. - This special exception was added by the Free Software Foundation - in version 1.24 of Bison. */ - -/* Tokens. */ -#ifndef YYTOKENTYPE -# define YYTOKENTYPE - /* Put the tokens into the symbol table, so that GDB and other debuggers - know about them. */ - enum yytokentype { - IDENTIFIER = 258, - CONSTANT = 259, - STRING_LITERAL = 260, - SIZEOF = 261, - PTR_OP = 262, - INC_OP = 263, - DEC_OP = 264, - LEFT_OP = 265, - RIGHT_OP = 266, - LE_OP = 267, - GE_OP = 268, - EQ_OP = 269, - NE_OP = 270, - AND_OP = 271, - OR_OP = 272, - MUL_ASSIGN = 273, - DIV_ASSIGN = 274, - MOD_ASSIGN = 275, - ADD_ASSIGN = 276, - SUB_ASSIGN = 277, - LEFT_ASSIGN = 278, - RIGHT_ASSIGN = 279, - AND_ASSIGN = 280, - XOR_ASSIGN = 281, - OR_ASSIGN = 282, - TYPE_NAME = 283, - TYPEDEF = 284, - EXTERN = 285, - STATIC = 286, - AUTO = 287, - REGISTER = 288, - CHAR = 289, - SHORT = 290, - INT = 291, - UINT = 292, - INT64 = 293, - LONG = 294, - SIGNED = 295, - UNSIGNED = 296, - FLOAT = 297, - DOUBLE = 298, - CONST = 299, - VOLATILE = 300, - VOID = 301, - VALIST = 302, - STRUCT = 303, - UNION = 304, - ENUM = 305, - ELLIPSIS = 306, - CASE = 307, - DEFAULT = 308, - IF = 309, - SWITCH = 310, - WHILE = 311, - DO = 312, - FOR = 313, - GOTO = 314, - CONTINUE = 315, - BREAK = 316, - RETURN = 317, - IFX = 318, - ELSE = 319, - CLASS = 320, - THISCLASS = 321, - CLASS_NAME = 322, - PROPERTY = 323, - SETPROP = 324, - GETPROP = 325, - NEWOP = 326, - RENEW = 327, - DELETE = 328, - EXT_DECL = 329, - EXT_STORAGE = 330, - IMPORT = 331, - DEFINE = 332, - VIRTUAL = 333, - EXT_ATTRIB = 334, - PUBLIC = 335, - PRIVATE = 336, - TYPED_OBJECT = 337, - ANY_OBJECT = 338, - _INCREF = 339, - EXTENSION = 340, - ASM = 341, - TYPEOF = 342, - WATCH = 343, - STOPWATCHING = 344, - FIREWATCHERS = 345, - WATCHABLE = 346, - CLASS_DESIGNER = 347, - CLASS_NO_EXPANSION = 348, - CLASS_FIXED = 349, - ISPROPSET = 350, - CLASS_DEFAULT_PROPERTY = 351, - PROPERTY_CATEGORY = 352, - CLASS_DATA = 353, - CLASS_PROPERTY = 354, - SUBCLASS = 355, - NAMESPACE = 356, - NEW0OP = 357, - RENEW0 = 358, - VAARG = 359, - DBTABLE = 360, - DBFIELD = 361, - DBINDEX = 362, - DATABASE_OPEN = 363 - }; -#endif -#define IDENTIFIER 258 -#define CONSTANT 259 -#define STRING_LITERAL 260 -#define SIZEOF 261 -#define PTR_OP 262 -#define INC_OP 263 -#define DEC_OP 264 -#define LEFT_OP 265 -#define RIGHT_OP 266 -#define LE_OP 267 -#define GE_OP 268 -#define EQ_OP 269 -#define NE_OP 270 -#define AND_OP 271 -#define OR_OP 272 -#define MUL_ASSIGN 273 -#define DIV_ASSIGN 274 -#define MOD_ASSIGN 275 -#define ADD_ASSIGN 276 -#define SUB_ASSIGN 
277 -#define LEFT_ASSIGN 278 -#define RIGHT_ASSIGN 279 -#define AND_ASSIGN 280 -#define XOR_ASSIGN 281 -#define OR_ASSIGN 282 -#define TYPE_NAME 283 -#define TYPEDEF 284 -#define EXTERN 285 -#define STATIC 286 -#define AUTO 287 -#define REGISTER 288 -#define CHAR 289 -#define SHORT 290 -#define INT 291 -#define UINT 292 -#define INT64 293 -#define LONG 294 -#define SIGNED 295 -#define UNSIGNED 296 -#define FLOAT 297 -#define DOUBLE 298 -#define CONST 299 -#define VOLATILE 300 -#define VOID 301 -#define VALIST 302 -#define STRUCT 303 -#define UNION 304 -#define ENUM 305 -#define ELLIPSIS 306 -#define CASE 307 -#define DEFAULT 308 -#define IF 309 -#define SWITCH 310 -#define WHILE 311 -#define DO 312 -#define FOR 313 -#define GOTO 314 -#define CONTINUE 315 -#define BREAK 316 -#define RETURN 317 -#define IFX 318 -#define ELSE 319 -#define CLASS 320 -#define THISCLASS 321 -#define CLASS_NAME 322 -#define PROPERTY 323 -#define SETPROP 324 -#define GETPROP 325 -#define NEWOP 326 -#define RENEW 327 -#define DELETE 328 -#define EXT_DECL 329 -#define EXT_STORAGE 330 -#define IMPORT 331 -#define DEFINE 332 -#define VIRTUAL 333 -#define EXT_ATTRIB 334 -#define PUBLIC 335 -#define PRIVATE 336 -#define TYPED_OBJECT 337 -#define ANY_OBJECT 338 -#define _INCREF 339 -#define EXTENSION 340 -#define ASM 341 -#define TYPEOF 342 -#define WATCH 343 -#define STOPWATCHING 344 -#define FIREWATCHERS 345 -#define WATCHABLE 346 -#define CLASS_DESIGNER 347 -#define CLASS_NO_EXPANSION 348 -#define CLASS_FIXED 349 -#define ISPROPSET 350 -#define CLASS_DEFAULT_PROPERTY 351 -#define PROPERTY_CATEGORY 352 -#define CLASS_DATA 353 -#define CLASS_PROPERTY 354 -#define SUBCLASS 355 -#define NAMESPACE 356 -#define NEW0OP 357 -#define RENEW0 358 -#define VAARG 359 -#define DBTABLE 360 -#define DBFIELD 361 -#define DBINDEX 362 -#define DATABASE_OPEN 363 - - - - -#if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) -#line 42 "grammar.y" -typedef union YYSTYPE { - SpecifierType specifierType; - int i; - AccessMode declMode; - Identifier id; - Expression exp; - Specifier specifier; - OldList * list; - Enumerator enumerator; - Declarator declarator; - Pointer pointer; - Initializer initializer; - InitDeclarator initDeclarator; - TypeName typeName; - Declaration declaration; - Statement stmt; - FunctionDefinition function; - External external; - Context context; - AsmField asmField; - - Instantiation instance; - MembersInit membersInit; - MemberInit memberInit; - ClassFunction classFunction; - ClassDefinition _class; - ClassDef classDef; - PropertyDef prop; - char * string; - Symbol symbol; - PropertyWatch propertyWatch; - TemplateParameter templateParameter; - TemplateArgument templateArgument; - TemplateDatatype templateDatatype; - - DBTableEntry dbtableEntry; - DBIndexItem dbindexItem; - DBTableDef dbtableDef; -} YYSTYPE; -/* Line 1318 of yacc.c. */ -#line 293 "grammar.eh" -# define yystype YYSTYPE /* obsolescent; will be withdrawn */ -# define YYSTYPE_IS_DECLARED 1 -# define YYSTYPE_IS_TRIVIAL 1 -#endif - -extern YYSTYPE yylval; - -#if ! defined (YYLTYPE) && ! 
defined (YYLTYPE_IS_DECLARED) -typedef struct YYLTYPE -{ - int first_line; - int first_column; - int last_line; - int last_column; -} YYLTYPE; -# define yyltype YYLTYPE /* obsolescent; will be withdrawn */ -# define YYLTYPE_IS_DECLARED 1 -# define YYLTYPE_IS_TRIVIAL 1 -#endif - -extern YYLTYPE yylloc; - - diff --git a/tests/examplefiles/test.erl b/tests/examplefiles/test.erl deleted file mode 100644 index d4ab4825..00000000 --- a/tests/examplefiles/test.erl +++ /dev/null @@ -1,181 +0,0 @@ --module(test). --export([listen/1, - handle_client/1, - maintain_clients/1, - start/1, - stop/0, - controller/1]). - --author("jerith"). - --define(TCP_OPTIONS,[list, {packet, 0}, {active, false}, {reuseaddr, true}]). - --record(player, {name=none, socket, mode}). - -%% To allow incoming connections, we need to listen on a TCP port. -%% This is also the entry point for our server as a whole, so it -%% starts the client_manager process and gives it a name so the rest -%% of the code can get to it easily. - -listen(Port) -> - {ok, LSocket} = gen_tcp:listen(Port, ?TCP_OPTIONS), - register(client_manager, spawn(?MODULE, maintain_clients, [[]])), - do_accept(LSocket). - -%% Accepting a connection gives us a connection socket with the -%% newly-connected client on the other end. Since we want to accept -%% more than one client, we spawn a new process for each and then wait -%% for another connection on our listening socket. - -do_accept(LSocket) -> - case gen_tcp:accept(LSocket) of - {ok, Socket} -> - spawn(?MODULE, handle_client, [Socket]), - client_manager ! {connect, Socket}; - {error, Reason} -> - io:format("Socket accept error: ~s~n", [Reason]) - end, - do_accept(LSocket). - -%% All the client-socket process needs to do is wait for data and -%% forward it to the client_manager process which decides what to do -%% with it. If the client disconnects, we let client_manager know and -%% then quietly go away. - -handle_client(Socket) -> - case gen_tcp:recv(Socket, 0) of - {ok, Data} -> - client_manager ! {data, Socket, Data}, - handle_client(Socket); - {error, closed} -> - client_manager ! {disconnect, Socket} - end. - -%% This is the main loop of the client_manager process. It maintains -%% the list of "players" and calls the handler for client input. - -maintain_clients(Players) -> - io:format("Players:~n", []), - lists:foreach(fun(P) -> io:format(">>> ~w~n", [P]) end, Players), - receive - {connect, Socket} -> - Player = #player{socket=Socket, mode=connect}, - send_prompt(Player), - io:format("client connected: ~w~n", [Player]), - NewPlayers = [Player | Players]; - {disconnect, Socket} -> - Player = find_player(Socket, Players), - io:format("client disconnected: ~w~n", [Player]), - NewPlayers = lists:delete(Player, Players); - {data, Socket, Data} -> - Player = find_player(Socket, Players), - NewPlayers = parse_data(Player, Players, Data), - NewPlayer = find_player(Socket, NewPlayers), - send_prompt(NewPlayer) - end, - maintain_clients(NewPlayers). - -%% find_player is a utility function to get a player record associated -%% with a particular socket out of the player list. - -find_player(Socket, Players) -> - {value, Player} = lists:keysearch(Socket, #player.socket, Players), - Player. - -%% delete_player returns the player list without the given player. It -%% deletes the player from the list based on the socket rather than -%% the whole record because the list might hold a different version. - -delete_player(Player, Players) -> - lists:keydelete(Player#player.socket, #player.socket, Players). 
- -%% Sends an appropriate prompt to the player. Currently the only -%% prompt we send is the initial "Name: " when the player connects. - -send_prompt(Player) -> - case Player#player.mode of - connect -> - gen_tcp:send(Player#player.socket, "Name: "); - active -> - ok - end. - -%% Sends the given data to all players in active mode. - -send_to_active(Prefix, Players, Data) -> - ActivePlayers = lists:filter(fun(P) -> P#player.mode == active end, - Players), - lists:foreach(fun(P) -> gen_tcp:send(P#player.socket, Prefix ++ Data) end, - ActivePlayers), - ok. - -%% We don't really do much parsing, but that will probably change as -%% more features are added. Currently this handles naming the player -%% when he first connects and treats everything else as a message to -%% send. - -parse_data(Player, Players, Data) -> - case Player#player.mode of - active -> - send_to_active(Player#player.name ++ ": ", - delete_player(Player, Players), Data), - Players; - connect -> - UPlayer = Player#player{name=bogostrip(Data), mode=active}, - [UPlayer | delete_player(Player, Players)] - end. - -%% Utility methods to clean up the name before we apply it. Called -%% bogostrip rather than strip because it returns the first continuous -%% block of non-matching characters rather stripping matching -%% characters off the front and back. - -bogostrip(String) -> - bogostrip(String, "\r\n\t "). - -bogostrip(String, Chars) -> - LStripped = string:substr(String, string:span(String, Chars)+1), - string:substr(LStripped, 1, string:cspan(LStripped, Chars)). - -%% Here we have some extra code to test other bits of pygments' Erlang -%% lexer. - -get_timestamp() -> - {{Year,Month,Day},{Hour,Min,Sec}} = erlang:universaltime(), - lists:flatten(io_lib:format( - "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ", - [Year, Month, Day, Hour, Min, Sec])). - -a_binary() -> - << 100:16/integer, 16#7f >>. - -a_list_comprehension() -> - [X*2 || X <- [1,2,3]]. - -a_map() -> - M0 = #{ a => 1, b => 2 }, - M1 = M0#{ b := 200 }. - -escape_sequences() -> - [ "\b\d\e\f\n\r\s\t\v\'\"\\" - , "\1\12\123" % octal - , "\x01" % short hex - , "\x{fff}" % long hex - , "\^a\^A" % control characters - ]. - -map(Fun, [H|T]) -> - [Fun(H) | map(Fun, T)]; - -map(Fun, []) -> - []. - -%% pmap, just because it's cool. - -pmap(F, L) -> - Parent = self(), - [receive {Pid, Result} -> - Result - end || Pid <- [spawn(fun() -> - Parent ! {self(), F(X)} - end) || X <- L]]. diff --git a/tests/examplefiles/test.escript b/tests/examplefiles/test.escript deleted file mode 100644 index 3fafb803..00000000 --- a/tests/examplefiles/test.escript +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env escript - -main(_Args) -> - ok. diff --git a/tests/examplefiles/test.evoque b/tests/examplefiles/test.evoque deleted file mode 100644 index 5a98d3bb..00000000 --- a/tests/examplefiles/test.evoque +++ /dev/null @@ -1,33 +0,0 @@ -$overlay{name=site_base} - -$begin{table_row} - $for{ col in row } - ${col}\ - $else - empty row - $rof -$end{table_row} - - - $for{ i, row in enumerate(rows) } - #[ "odd" rows get a special style ]# - $evoque{#table_row} - $evoque{ - #table_row - } - $evoque{'#table_row'} - $evoque{ '#table_row', collection=None, quoting="str"} - $evoque{name="#table_row"} - $evoque{name=var_table_row} - $evoque{%#table_row%} - $evoque{% #table_row %} - - $rof -
    - -$evoque{disclaimer, collection="legals"} -$evoque{ disclaimer , collection="legals", abc=123} -$evoque{% disclaimer, collection="legals"%} - -$test{% site_base="site.html", - rows=[("a", "b", 3.0, {"one":1}, "", "i", "j")] %} diff --git a/tests/examplefiles/test.fan b/tests/examplefiles/test.fan deleted file mode 100755 index 00e80b60..00000000 --- a/tests/examplefiles/test.fan +++ /dev/null @@ -1,818 +0,0 @@ -// -// Copyright (c) 2008, Brian Frank and Andy Frank -// Licensed under the Academic Free License version 3.0 -// -// History: -// 17 Nov 08 Brian Frank Creation -// - -using compiler - -** -** JavaBridge is the compiler plugin for bringing Java -** classes into the Fantom type system. -** -class JavaBridge : CBridge -{ - -////////////////////////////////////////////////////////////////////////// -// Constructor -////////////////////////////////////////////////////////////////////////// - - ** - ** Construct a JavaBridge for current environment - ** - new make(Compiler c, ClassPath cp := ClassPath.makeForCurrent) - : super(c) - { - this.cp = cp - } - -////////////////////////////////////////////////////////////////////////// -// Namespace -////////////////////////////////////////////////////////////////////////// - - ** - ** Map a FFI "podName" to a Java package. - ** - override CPod resolvePod(Str name, Loc? loc) - { - // the empty package is used to represent primitives - if (name == "") return primitives - - // look for package name in classpatch - classes := cp.classes[name] - if (classes == null) - throw CompilerErr("Java package '$name' not found", loc) - - // map package to JavaPod - return JavaPod(this, name, classes) - } - - ** - ** Map class meta-data and Java members to Fantom slots - ** for the specified JavaType. - ** - virtual Void loadType(JavaType type, Str:CSlot slots) - { - JavaReflect.loadType(type, slots) - } - -////////////////////////////////////////////////////////////////////////// -// Call Resolution -////////////////////////////////////////////////////////////////////////// - - ** - ** Resolve a construction call to a Java constructor. - ** - override Expr resolveConstruction(CallExpr call) - { - // if the last argument is an it-block, then we know - // right away that we will not be passing it thru to Java, - // so strip it off to be appended as call to Obj.with - itBlock := call.args.last as ClosureExpr - if (itBlock != null && itBlock.isItBlock) - call.args.removeAt(-1) - else - itBlock = null - - // if this is an interop array like IntArray/int[] use make - // factory otherwise look for Java constructor called - JavaType base := call.target.ctype - if (base.isInteropArray) - call.method = base.method("make") - else - call.method = base.method("") - - // call resolution to deal with overloading - call = resolveCall(call) - - // we need to create an implicit target for the Java runtime - // to perform the new opcode to ensure it is on the stack - // before the args (we don't do this for interop Array classes) - if (!base.isInteropArray) - { - loc := call.loc - call.target = CallExpr.makeWithMethod(loc, null, base.newMethod) { synthetic=true } - } - - // if we stripped an it-block argument, - // add it as trailing call to Obj.with - if (itBlock != null) return itBlock.toWith(call) - return call - } - - ** - ** Resolve a construction chain call where a Fantom constructor - ** calls the super-class constructor. Type check the arguments - ** and insert any conversions needed. 
- ** - override Expr resolveConstructorChain(CallExpr call) - { - // we don't allow chaining to a this ctor for Java FFI - if (call.target.id !== ExprId.superExpr) - throw err("Must use super constructor call in Java FFI", call.loc) - - // route to a superclass constructor - JavaType base := call.target.ctype.deref - call.method = base.method("") - - // call resolution to deal with overloading - return resolveCall(call) - } - - ** - ** Given a dot operator slot access on the given foreign - ** base type, determine the appopriate slot to use based on - ** whether parens were used - ** base.name => noParens = true - ** base.name() => noParens = false - ** - ** In Java a given name could be bound to both a field and - ** a method. In this case we only resolve the field if - ** no parens are used. We also handle the special case of - ** Java annotations here because their element methods are - ** also mapped as Fantom fields (instance based mixin field). - ** - override CSlot? resolveSlotAccess(CType base, Str name, Bool noParens) - { - // first try to resolve as a field - field := base.field(name) - if (field != null) - { - // if no () we used and this isn't an annotation field - if (noParens && (field.isStatic || !base.isMixin)) - return field - - // if we did find a field, then make sure we use that - // field's parent type to resolve a method (becuase the - // base type might be a sub-class of a Java type in which - // case it is unware of field/method overloads) - return field.parent.method(name) - } - - // lookup method - return base.method(name) - } - - ** - ** Resolve a method call: try to find the best match - ** and apply any coercions needed. - ** - override CallExpr resolveCall(CallExpr call) - { - // try to match against all the overloaded methods - matches := CallMatch[,] - CMethod? m := call.method - while (m != null) - { - match := matchCall(call, m) - if (match != null) matches.add(match) - m = m is JavaMethod ? ((JavaMethod)m).next : null - } - - // if we have exactly one match use then use that one - if (matches.size == 1) return matches[0].apply(call) - - // if we have multiple matches; resolve to - // most specific match according to JLS rules - // TODO: this does not correct resolve when using Fantom implicit casting - if (matches.size > 1) - { - best := resolveMostSpecific(matches) - if (best != null) return best.apply(call) - } - - // zero or multiple ambiguous matches is a compiler error - s := StrBuf() - s.add(matches.isEmpty ? "Invalid args " : "Ambiguous call ") - s.add(call.name).add("(") - s.add(call.args.join(", ") |Expr arg->Str| { return arg.toTypeStr }) - s.add(")") - throw err(s.toStr, call.loc) - } - - ** - ** Check if the call matches the specified overload method. - ** If so return method and coerced args otherwise return null. - ** - internal CallMatch? 
matchCall(CallExpr call, CMethod m) - { - // first check if have matching numbers of args and params - args := call.args - if (m.params.size < args.size) return null - - // check if each argument is ok or can be coerced - isErr := false - newArgs := args.dup - m.params.each |CParam p, Int i| - { - if (i >= args.size) - { - // param has a default value, then that is ok - if (!p.hasDefault) isErr = true - } - else - { - // ensure arg fits parameter type (or auto-cast) - newArgs[i] = coerce(args[i], p.paramType) |->| { isErr = true } - } - } - if (isErr) return null - return CallMatch { it.method = m; it.args = newArgs } - } - - ** - ** Given a list of overloaed methods find the most specific method - ** according to Java Language Specification 15.11.2.2. The "informal - ** intuition" rule is that a method is more specific than another - ** if the first could be could be passed onto the second one. - ** - internal static CallMatch? resolveMostSpecific(CallMatch[] matches) - { - CallMatch? best := matches[0] - for (i:=1; iBool| - { - bp := b.method.params[i] - return ap.paramType.fits(bp.paramType) - } - } - -////////////////////////////////////////////////////////////////////////// -// Overrides -////////////////////////////////////////////////////////////////////////// - - ** - ** Called during Inherit step when a Fantom slot overrides a FFI slot. - ** Log and throw compiler error if there is a problem. - ** - override Void checkOverride(TypeDef t, CSlot base, SlotDef def) - { - // we don't allow Fantom to override Java methods with multiple - // overloaded versions since the Fantom type system can't actually - // override all the overloaded versions - jslot := base as JavaSlot - if (jslot?.next != null) - throw err("Cannot override Java overloaded method: '$jslot.name'", def.loc) - - // route to method override checking - if (base is JavaMethod && def is MethodDef) - checkMethodOverride(t, base, def) - } - - ** - ** Called on method/method overrides in the checkOverride callback. - ** - private Void checkMethodOverride(TypeDef t, JavaMethod base, MethodDef def) - { - // bail early if we know things aren't going to work out - if (base.params.size != def.params.size) return - - // if the return type is primitive or Java array and the - // Fantom declaration matches how it is inferred into the Fan - // type system, then just change the return type - the compiler - // will impliclty do all the return coercions - if (isOverrideInferredType(base.returnType, def.returnType)) - { - def.ret = def.inheritedRet = base.returnType - } - - // if any of the parameters is a primitive or Java array - // and the Fantom declaration matches how it is inferred into - // the Fantom type type, then change the parameter type to - // the Java override type and make the Fantom type a local - // variable: - // Java: void foo(int a) { ... } - // Fantom: Void foo(Int a) { ... } - // Result: Void foo(int a_$J) { Int a := a_$J; ... 
} - // - base.params.eachr |CParam bp, Int i| - { - dp := def.paramDefs[i] - if (!isOverrideInferredType(bp.paramType, dp.paramType)) return - - // add local variable: Int bar := bar_$J - local := LocalDefStmt(def.loc) - local.ctype = dp.paramType - local.name = dp.name - local.init = UnknownVarExpr(def.loc, null, dp.name + "_\$J") - def.code.stmts.insert(0, local) - - // rename parameter Int bar -> int bar_$J - dp.name = dp.name + "_\$J" - dp.paramType = bp.paramType - } - } - - ** - ** When overriding a Java method check if the base type is - ** is a Java primitive or array and the override definition is - ** matches how the Java type is inferred in the Fantom type system. - ** If we have a match return true and we'll swizzle things in - ** checkMethodOverride. - ** - static private Bool isOverrideInferredType(CType base, CType def) - { - // check if base class slot is a JavaType - java := base.toNonNullable as JavaType - if (java != null) - { - // allow primitives is it matches the inferred type - if (java.isPrimitive) return java.inferredAs == def - - // allow arrays if mapped as Foo[] -> Foo?[]? - if (java.isArray) return java.inferredAs == def.toNonNullable && def.isNullable - } - return false - } - -////////////////////////////////////////////////////////////////////////// -// CheckErrors -////////////////////////////////////////////////////////////////////////// - - ** - ** Called during CheckErrors step for a type which extends - ** a FFI class or implements any FFI mixins. - ** - override Void checkType(TypeDef def) - { - // can't subclass a primitive array like ByteArray/byte[] - if (def.base.deref is JavaType && def.base.deref->isInteropArray) - { - err("Cannot subclass from Java interop array: $def.base", def.loc) - return - } - - // we don't allow deep inheritance of Java classes because - // the Fantom constructor and Java constructor model don't match - // up past one level of inheritance - // NOTE: that that when we remove this restriction we need to - // test how field initialization works because instance$init - // is almost certain to break with the current emit design - javaBase := def.base - while (javaBase != null && !javaBase.isForeign) javaBase = javaBase.base - if (javaBase != null && javaBase !== def.base) - { - err("Cannot subclass Java class more than one level: $javaBase", def.loc) - return - } - - // ensure that when we map Fantom constructors to Java - // constructors that we don't have duplicate signatures - ctors := def.ctorDefs - ctors.each |MethodDef a, Int i| - { - ctors.each |MethodDef b, Int j| - { - if (i > j && areParamsSame(a, b)) - err("Duplicate Java FFI constructor signatures: '$b.name' and '$a.name'", a.loc) - } - } - } - - ** - ** Do the two methods have the exact same parameter types. - ** - static Bool areParamsSame(CMethod a, CMethod b) - { - if (a.params.size != b.params.size) return false - for (i:=0; i| { fits=false } - return fits - } - - ** - ** Coerce expression to expected type. If not a type match - ** then run the onErr function. 
- ** - override Expr coerce(Expr expr, CType expected, |->| onErr) - { - // handle easy case - actual := expr.ctype - expected = expected.deref - if (actual == expected) return expr - - // handle null literal - if (expr.id === ExprId.nullLiteral && expected.isNullable) - return expr - - // handle Fantom to Java primitives - if (expected.pod == primitives) - return coerceToPrimitive(expr, expected, onErr) - - // handle Java primitives to Fan - if (actual.pod == primitives) - return coerceFromPrimitive(expr, expected, onErr) - - // handle Java array to Fantom list - if (actual.name[0] == '[') - return coerceFromArray(expr, expected, onErr) - - // handle Fantom list to Java array - if (expected.name[0] == '[') - return coerceToArray(expr, expected, onErr) - - // handle sys::Func -> Java interface - if (actual is FuncType && expected.isMixin && expected.toNonNullable is JavaType) - return coerceFuncToInterface(expr, expected.toNonNullable, onErr) - - // handle special classes and interfaces for built-in Fantom - // classes which actually map directly to Java built-in types - if (actual.isBool && boolTypes.contains(expected.toNonNullable.signature)) return box(expr) - if (actual.isInt && intTypes.contains(expected.toNonNullable.signature)) return box(expr) - if (actual.isFloat && floatTypes.contains(expected.toNonNullable.signature)) return box(expr) - if (actual.isDecimal && decimalTypes.contains(expected.toNonNullable.signature)) return expr - if (actual.isStr && strTypes.contains(expected.toNonNullable.signature)) return expr - - // use normal Fantom coercion behavior - return super.coerce(expr, expected, onErr) - } - - ** - ** Ensure value type is boxed. - ** - private Expr box(Expr expr) - { - if (expr.ctype.isVal) - return TypeCheckExpr.coerce(expr, expr.ctype.toNullable) - else - return expr - } - - ** - ** Coerce a fan expression to a Java primitive (other - ** than the ones we support natively) - ** - Expr coerceToPrimitive(Expr expr, JavaType expected, |->| onErr) - { - actual := expr.ctype - - // sys::Int (long) -> int, short, byte - if (actual.isInt && expected.isPrimitiveIntLike) - return TypeCheckExpr.coerce(expr, expected) - - // sys::Float (double) -> float - if (actual.isFloat && expected.isPrimitiveFloat) - return TypeCheckExpr.coerce(expr, expected) - - // no coercion - type error - onErr() - return expr - } - - ** - ** Coerce a Java primitive to a Fantom type. - ** - Expr coerceFromPrimitive(Expr expr, CType expected, |->| onErr) - { - actual := (JavaType)expr.ctype - - // int, short, byte -> sys::Int (long) - if (actual.isPrimitiveIntLike) - { - if (expected.isInt || expected.isObj) - return TypeCheckExpr.coerce(expr, expected) - } - - // float -> sys::Float (float) - if (actual.isPrimitiveFloat) - { - if (expected.isFloat || expected.isObj) - return TypeCheckExpr.coerce(expr, expected) - } - - // no coercion - type error - onErr() - return expr - } - - ** - ** Coerce a Java array to a Fantom list. 
- ** - Expr coerceFromArray(Expr expr, CType expected, |->| onErr) - { - actual := (JavaType)expr.ctype.toNonNullable - - // if expected is array type - if (expected is JavaType && ((JavaType)expected).isArray) - if (actual.arrayOf.fits(((JavaType)expected).arrayOf)) return expr - - // if expected is Obj - if (expected.isObj) return arrayToList(expr, actual.inferredArrayOf) - - // if expected is list type - if (expected.toNonNullable is ListType) - { - expectedOf := ((ListType)expected.toNonNullable).v - if (actual.inferredArrayOf.fits(expectedOf)) return arrayToList(expr, expectedOf) - } - - // no coercion available - onErr() - return expr - } - - ** - ** Generate List.make(of, expr) where expr is Object[] - ** - private Expr arrayToList(Expr expr, CType of) - { - loc := expr.loc - ofExpr := LiteralExpr(loc, ExprId.typeLiteral, ns.typeType, of) - call := CallExpr.makeWithMethod(loc, null, listMakeFromArray, [ofExpr, expr]) - call.synthetic = true - return call - } - - ** - ** Coerce a Fantom list to Java array. - ** - Expr coerceToArray(Expr expr, CType expected, |->| onErr) - { - loc := expr.loc - expectedOf := ((JavaType)expected.toNonNullable).inferredArrayOf - actual := expr.ctype - - // if actual is list type - if (actual.toNonNullable is ListType) - { - actualOf := ((ListType)actual.toNonNullable).v - if (actualOf.fits(expectedOf)) - { - // (Foo[])list.asArray(cls) - clsLiteral := CallExpr.makeWithMethod(loc, null, JavaType.classLiteral(this, expectedOf)) - asArray := CallExpr.makeWithMethod(loc, expr, listAsArray, [clsLiteral]) - return TypeCheckExpr.coerce(asArray, expected) - } - } - - // no coercion available - onErr() - return expr - } - - ** - ** Attempt to coerce a parameterized sys::Func expr to a Java - ** interface if the interface supports exactly one matching method. - ** - Expr coerceFuncToInterface(Expr expr, JavaType expected, |->| onErr) - { - // check if we have exactly one abstract method in the expected type - loc := expr.loc - abstracts := expected.methods.findAll |CMethod m->Bool| { return m.isAbstract } - if (abstracts.size != 1) { onErr(); return expr } - method := abstracts.first - - // check if we have a match - FuncType funcType := (FuncType)expr.ctype - if (!isFuncToInterfaceMatch(funcType, method)) { onErr(); return expr } - - // check if we've already generated a wrapper for this combo - key := "${funcType.signature}+${method.qname}" - ctor := funcWrappers[key] - if (ctor == null) - { - ctor = generateFuncToInterfaceWrapper(expr.loc, funcType, expected, method) - funcWrappers[key] = ctor - } - - // replace expr with FuncWrapperX(expr) - call := CallExpr.makeWithMethod(loc, null, ctor, [expr]) - call.synthetic = true - return call - } - - ** - ** Return if the specified function type can be used to implement - ** the specified interface method. 
- ** - Bool isFuncToInterfaceMatch(FuncType funcType, CMethod method) - { - // sanity check to map to callX method - can't handle more than 8 args - if (method.params.size > 8) return false - - // check if method is match for function; first check is that - // method must supply all the arguments required by the function - if (funcType.params.size > method.params.size) return false - - // check that func return type fits method return - retOk := method.returnType.isVoid || fits(funcType.ret, method.returnType) - if (!retOk) return false - - // check all the method parameters fit the function parameters - paramsOk := funcType.params.all |CType f, Int i->Bool| { return fits(f, method.params[i].paramType) } - if (!paramsOk) return false - - return true - } - - ** - ** Generate the wrapper which implements the specified expected interface - ** and overrides the specified method which calls the function. - ** - CMethod generateFuncToInterfaceWrapper(Loc loc, FuncType funcType, CType expected, CMethod method) - { - // Fantom: func typed as |Str| - // Java: interface Foo { void bar(String) } - // Result: FuncWrapperX(func) - // - // class FuncWrapperX : Foo - // { - // new make(Func f) { _func = f } - // override Void bar(Str a) { _func.call(a) } - // Func _func - // } - - // generate FuncWrapper class - name := "FuncWrapper" + funcWrappers.size - cls := TypeDef(ns, loc, compiler.types[0].unit, name, FConst.Internal + FConst.Synthetic) - cls.base = ns.objType - cls.mixins = [expected] - addTypeDef(cls) - - // generate FuncWrapper._func field - field := FieldDef(loc, cls) - ((SlotDef)field).name = "_func" - ((DefNode)field).flags = FConst.Private + FConst.Storage + FConst.Synthetic - field.fieldType = funcType - cls.addSlot(field) - - // generate FuncWrapper.make constructor - ctor := MethodDef(loc, cls, "make", FConst.Internal + FConst.Ctor + FConst.Synthetic) - ctor.ret = ns.voidType - ctor.paramDefs = [ParamDef(loc, funcType, "f")] - ctor.code = Block.make(loc) - ctor.code.stmts.add(BinaryExpr.makeAssign( - FieldExpr(loc, ThisExpr(loc), field), - UnknownVarExpr(loc, null, "f")).toStmt) - ctor.code.stmts.add(ReturnStmt.make(loc)) - cls.addSlot(ctor) - - // generate FuncWrapper override of abstract method - over := MethodDef(loc, cls, method.name, FConst.Public + FConst.Override + FConst.Synthetic) - over.ret = method.returnType - over.paramDefs = ParamDef[,] - over.code = Block.make(loc) - callArity := "call" - call := CallExpr.makeWithMethod(loc, FieldExpr(loc, ThisExpr(loc), field), funcType.method(callArity)) - method.params.each |CParam param, Int i| - { - paramName := "p$i" - over.params.add(ParamDef(loc, param.paramType, paramName)) - if (i < funcType.params.size) - call.args.add(UnknownVarExpr(loc, null, paramName)) - } - if (method.returnType.isVoid) - over.code.stmts.add(call.toStmt).add(ReturnStmt(loc)) - else - over.code.stmts.add(ReturnStmt(loc, call)) - cls.addSlot(over) - - // return the ctor which we use for coercion - return ctor - } - -////////////////////////////////////////////////////////////////////////// -// Reflection -////////////////////////////////////////////////////////////////////////// - - ** - ** Get a CMethod representation for 'List.make(Type, Object[])' - ** - once CMethod listMakeFromArray() - { - return JavaMethod( - this.ns.listType, - "make", - FConst.Public + FConst.Static, - this.ns.listType.toNullable, - [ - JavaParam("of", this.ns.typeType), - JavaParam("array", objectArrayType) - ]) - } - - ** - ** Get a CMethod representation for 'Object[] 
List.asArray()' - ** - once CMethod listAsArray() - { - return JavaMethod( - this.ns.listType, - "asArray", - FConst.Public, - objectArrayType, - [JavaParam("cls", classType)]) - } - - ** - ** Get a CType representation for 'java.lang.Class' - ** - once JavaType classType() - { - return ns.resolveType("[java]java.lang::Class") - } - - ** - ** Get a CType representation for 'java.lang.Object[]' - ** - once JavaType objectArrayType() - { - return ns.resolveType("[java]java.lang::[Object") - } - -////////////////////////////////////////////////////////////////////////// -// Fields -////////////////////////////////////////////////////////////////////////// - - const static Str[] boolTypes := Str[ - "[java]java.io::Serializable", - "[java]java.lang::Comparable", - ] - - const static Str[] intTypes := Str[ - "[java]java.lang::Number", - "[java]java.io::Serializable", - "[java]java.lang::Comparable", - ] - - const static Str[] floatTypes := Str[ - "[java]java.lang::Number", - "[java]java.io::Serializable", - "[java]java.lang::Comparable", - ] - - const static Str[] decimalTypes := Str[ - "[java]java.lang::Number", - "[java]java.io::Serializable", - "[java]java.lang::Comparable", - ] - - const static Str[] strTypes := Str[ - "[java]java.io::Serializable", - "[java]java.lang::CharSequence", - "[java]java.lang::Comparable", - ] - - JavaPrimitives primitives := JavaPrimitives(this) - ClassPath cp - - private Str:CMethod funcWrappers := Str:CMethod[:] // funcType+method:ctor - -} - -************************************************************************** -** CallMatch -************************************************************************** - -internal class CallMatch -{ - CallExpr apply(CallExpr call) - { - call.args = args - call.method = method - call.ctype = method.isCtor ? method.parent : method.returnType - return call - } - - override Str toStr() { return method.signature } - - CMethod? method // matched method - Expr[]? 
args // coerced arguments -} \ No newline at end of file diff --git a/tests/examplefiles/test.flx b/tests/examplefiles/test.flx deleted file mode 100644 index 4c8a667b..00000000 --- a/tests/examplefiles/test.flx +++ /dev/null @@ -1,57 +0,0 @@ -type tiny = "%i8"; -type int = "%i32"; -typedef bool = 2; -fun add : int*int -> int = "%add"; -fun sub : int*int -> int = "%sub"; -fun eq : int*int -> bool = "%eq"; -fun lnot : bool -> bool = "%lnot"; -proc exit : int = "exit"; - -// comment 1 -/* - /* - foo bar - */ -asdas -*/ - -noinline fun foo (x:int) = { - val y = 6; - return x + y; -} - -noinline proc fake_exit (x:int) { - exit x; - return; -} - -noinline fun bar (x:int) = { - var y = 10; - noinline proc baz () { - y = 20; - return; - } - baz (); - return x + y; -} - -noinline fun x (a:int, b:int, c:tiny) = { - val x1 = a; - val x2 = b; - val x3 = c; - noinline fun y (d:int, e:int, f:tiny) = { - val y1 = x1; - val y2 = x2; - val y3 = f; - noinline fun z (g:int, h:int, i:tiny) = { - val z1 = x1; - val z2 = x2; - val z3 = i; - return z1; - } - return z (y1,y2,y3); - } - return y (x1,x2,x3); -} - -fake_exit $ (foo 2) + (bar 3) + (x (1,2,3t)); diff --git a/tests/examplefiles/test.gdc b/tests/examplefiles/test.gdc deleted file mode 100644 index c7e36b81..00000000 --- a/tests/examplefiles/test.gdc +++ /dev/null @@ -1,13 +0,0 @@ -# Execute the date dimension MAQL script -ExecuteMaql(maqlFile="examples/quotes/quote_date.maql"); - -# load the stock quotes data file -# the data file config has been generated -LoadCsv(csvDataFile="examples/quotes/quotes.csv", - header="true", - configFile="examples/quotes/quotes.config.xml"); - -# transfer the stock quotes data -TransferLastSnapshot(); - -LoadGoogleAnalytics(configFile="examples/ga/ga.config.xml",username="example@gmail.com",password="******",profileId="ga:7468896",dimensions="ga:date|ga:browser|ga:browserVersion|ga:country|ga:isMobile",metrics="ga:bounces|ga:newVisits|ga:pageViews|ga:visits",startDate="2008-01-01",endDate="2010-06-15"); diff --git a/tests/examplefiles/test.gradle b/tests/examplefiles/test.gradle deleted file mode 100644 index 0bc834c1..00000000 --- a/tests/examplefiles/test.gradle +++ /dev/null @@ -1,20 +0,0 @@ -apply plugin: 'java' - -repositories { - mavenCentral() -} - -dependencies { - testCompile 'junit:junit:4.12' -} - -task sayHello << { - def x = SomeClass.worldString - println "Hello ${x}" -} - -private class SomeClass { - public static String getWorldString() { - return "world" - } -} diff --git a/tests/examplefiles/test.groovy b/tests/examplefiles/test.groovy deleted file mode 100644 index 903863d2..00000000 --- a/tests/examplefiles/test.groovy +++ /dev/null @@ -1,97 +0,0 @@ -// This source code comes from http://www.odelia-technologies.com/node/200 - -package com.odelia.groovy.simpleworkflow - - -class SimpleWorkflowEngine { - def workflowMap = [:] - def context = [:] - def beforeActivityName = 'beforeActivity' - def afterActivityName = 'afterActivity' - - SimpleWorkflowEngine(workflow, context = [:]) { - this.context = context - parseWorkflow(workflow) - } - - def parseWorkflow(workflow) { - workflowMap = new WorkflowParser().parse(workflow) - } - - def getActivityValue(activity) { - assert activity instanceof String - if (!workflowMap[activity]) - throw new RuntimeException("$activity activity doesn't exist") - workflowMap[activity] - } - - def execute(activity, pause) { - if (workflowMap[beforeActivityName]) { - getActivityValue(beforeActivityName)(context, activity) - } - - def activityValue = getActivityValue(activity) - 
- // Determine the next activity to execute - def nextActivity - switch (activityValue) { - case String: nextActivity = activityValue; break - case Closure: nextActivity = activityValue(context); break - case Class: nextActivity = activityValue.newInstance()(context) - } - - if (workflowMap[afterActivityName]) { - getActivityValue(afterActivityName)(context, activity, nextActivity) - } - - if (!pause && nextActivity) - call(nextActivity) - else - nextActivity - } - - def call(activity) { - execute(activity, false) - } - - def nextActivity(activity) { - execute(activity, true) - } - - static void main(String[] args) { - if (args.size() != 2) { - println 'Usage: com.odelia.groovy.simpleworkflow.SimpleWorkflowEngine ' - return - } - SimpleWorkflowEngine.newInstance(new File(args[0]))(args[1]) - } - -} - -private class WorkflowParser { - def map = [:] - - def methodMissing(String name, args) { - map[name] = args[0] - } - - def parse(Closure wf) { - wf.delegate = this - wf.resolveStrategy = Closure.DELEGATE_FIRST - wf() - map - } - - def workflow = { it -> - it.delegate = this - it.resolveStrategy = Closure.DELEGATE_FIRST - it() - } - - def parse(File workflowDef) { - def binding = new Binding([workflow: workflow]) - def shell = new GroovyShell(binding) - shell.evaluate(workflowDef) - map - } -} \ No newline at end of file diff --git a/tests/examplefiles/test.hsail b/tests/examplefiles/test.hsail deleted file mode 100644 index f9c25091..00000000 --- a/tests/examplefiles/test.hsail +++ /dev/null @@ -1,62 +0,0 @@ -module &__llvm_hsail_module:1:0:$full:$large:$near; - -prog kernel &mmul2d( - kernarg_u64 %__arg_p0, - kernarg_u64 %__arg_p1, - kernarg_u64 %__arg_p2, - kernarg_u64 %__arg_p3) -{ - pragma "AMD RTI", "ARGSTART:mmul2d"; - pragma "AMD RTI", "version:3:1:104"; - pragma "AMD RTI", "device:generic"; - pragma "AMD RTI", "uniqueid:1025"; - pragma "AMD RTI", "function:1:0"; - pragma "AMD RTI", "memory:64bitABI"; - pragma "AMD RTI", "privateid:1"; - pragma "AMD RTI", "ARGEND:mmul2d"; - // BB#0: // %top - mov_f64 $d1, 0.0E+0; - gridsize_u32 $s0, 0; - workitemabsid_u32 $s1, 1; - workitemabsid_u32 $s2, 0; - cvt_u64_u32 $d0, $s2; - cvt_u64_u32 $d3, $s1; - cvt_u64_u32 $d4, $s0; - ld_kernarg_align(8)_width(all)_u64 $d2, [%__arg_p2]; - ld_kernarg_align(8)_width(all)_u64 $d6, [%__arg_p1]; - ld_kernarg_align(8)_width(all)_u64 $d5, [%__arg_p3]; - ld_kernarg_align(8)_width(all)_u64 $d7, [%__arg_p0]; - cmp_lt_b1_s64 $c0, $d5, 1; - cbr_b1 $c0, @BB0_3; - // BB#1: // %L.preheader - mul_u64 $d1, $d5, $d3; - shl_u64 $d1, $d1, 3; - shl_u64 $d8, $d0, 3; - add_u64 $d8, $d7, $d8; - add_u64 $d6, $d6, $d1; - shl_u64 $d7, $d4, 3; - mov_f64 $d1, 0D0000000000000000; - -@BB0_2: - // %L - add_u64 $d9, $d8, $d7; - ld_global_f64 $d8, [$d8]; - ld_global_f64 $d10, [$d6]; - mul_f64 $d8, $d8, $d10; - add_f64 $d1, $d1, $d8; - add_u64 $d6, $d6, 8; - add_u64 $d5, $d5, 18446744073709551615; - cmp_ne_b1_s64 $c0, $d5, 0; - mov_b64 $d8, $d9; - cbr_b1 $c0, @BB0_2; - -@BB0_3: - // %L.7 - mul_u64 $d3, $d3, $d4; - add_u64 $d0, $d3, $d0; - shl_u64 $d0, $d0, 3; - add_u64 $d0, $d2, $d0; - st_global_f64 $d1, [$d0]; - ret; -}; - diff --git a/tests/examplefiles/test.html b/tests/examplefiles/test.html deleted file mode 100644 index b22f0c61..00000000 --- a/tests/examplefiles/test.html +++ /dev/null @@ -1,339 +0,0 @@ - - - - - - -
    # -*- coding: utf-8 -*-
    -"""
    -    pocoo.pkg.core.acl
    -    ~~~~~~~~~~~~~~~~~~
    -
    -    Pocoo ACL System.
    -
    -"""
    -
    -from pocoo.db import meta
    -
    -from pocoo.pkg.core.forum import Site, Forum, Thread
    -from pocoo.pkg.core.user import User, Group
    -
    -from pocoo.pkg.core.db import users, groups, group_members, privileges, \
    -     forums, posts, acl_mapping, acl_subjects, acl_objects
    -
    -
    -class AclManager(object):
    -    """
    -    Manager object to manage ACLs.
    -    """
    -    STRONG_NO = -1
    -
    -    WEAK_NO = 0
    -    WEAK_YES = 1
    -    STRONG_YES = 2
    -
    -    def __init__(self, ctx, subject):
    -        self.ctx = ctx
    -
    -        self.subject = subject
    -        if isinstance(subject, User):
    -            self._type = 'user'
    -
    -        elif isinstance(subject, Group):
    -            self._type = 'group'
    -
    -        else:
    -            raise ValueError('neither user or group specified')
    -
    -    def allow(self, privilege, obj, force=False):
    -        """Allows the subject privilege on obj."""
    -
    -        return self._set(privilege, obj, 1 + bool(force))
    -
    -    def default(self, privilege, obj):
    -        """Sets the state for privilege on obj back to weak yes."""
    -
    -        return self._set(privilege, obj, 0)
    -
    -    def deny(self, privilege, obj, force=False):
    -        """Denies the subject privilege on obj."""
    -
    -        return self._set(privilege, obj, -1 - bool(force))
    -
    -    def can_access(self, privilege, obj):
    -        """Checks if the current subject can access obj with the required
    -        privilege somehow. Either directly or when the subject is a user and
    -        one of its groups can access it."""
    -
    -        #XXX: maybe this could be one big query instead of 4
    -        #XXX: this currently does not work correctly, therefore return True
    -        return True
    -
    -        if not isinstance(obj, (Forum, Thread, Site.__class__)):
    -            raise TypeError('obj must be a forum, thread or site')
    -        privilege = privilege.upper()
    -        s = self._get_subject_join().alias('s').c
    -
    -        def do_check(obj, tendency):
    -            db = self.ctx.engine
    -
    -            o = self._get_object_join(obj).alias('o').c
    -
    -            # self check
    -            r = db.execute(meta.select([acl_mapping.c.state],
    -                (acl_mapping.c.priv_id == privileges.c.priv_id) &
    -
    -                (acl_mapping.c.subject_id == s.subject_id) &
    -                (acl_mapping.c.object_id == o.object_id) &
    -
    -                (privileges.c.name == privilege)
    -            ))
    -            row = r.fetchone()
    -            if row is not None:
    -                if row['state'] in (self.STRONG_NO, self.STRONG_YES):
    -                    return row['state'] == self.STRONG_YES
    -
    -                tendency = row['state']
    -
    -            # if the controlled subject is a user check all groups
    -            if isinstance(self.subject, User):
    -                r = db.execute(meta.select([acl_mapping.c.state],
    -                    (acl_mapping.c.object_id == o.object_id) &
    -
    -                    (acl_mapping.c.subject_id == groups.c.subject_id) &
    -
    -                    (groups.c.group_id == group_members.c.group_id) &
    -
    -                    (group_members.c.user_id == self.subject.user_id)
    -                ))
    -                while True:
    -                    row = r.fetchone()
    -                    if row is None:
    -                        break
    -
    -                    state = row[0]
    -                    if state in (self.STRONG_YES, self.STRONG_NO):
    -                        return state == self.STRONG_YES
    -
    -                    if tendency is None:
    -                        tendency = state
    -                    elif tendency == self.WEAK_NO and state == self.WEAK_YES:
    -                        tendency = self.WEAK_YES
    -
    -            # check related objects
    -            if isinstance(obj, Thread):
    -                return do_check(obj.forum, tendency)
    -            elif isinstance(obj, Forum):
    -                return do_check(Site, tendency)
    -            else:
    -                return tendency
    -
    -        return do_check(obj, None) in (self.WEAK_YES, self.STRONG_YES)
    -
    -    def _set(self, privilege, obj, state):
    -        """Helper functions for settings privileges."""
    -
    -        privilege = privilege.upper()
    -        if self.subject.subject_id is None:
    -            self._bootstrap()
    -        if obj.object_id is None:
    -            self._bootstrap_object(obj)
    -        # special state "0" which means delete
    -
    -        if not state:
    -            p = meta.select([privileges.c.priv_id], privileges.c.name == privilege)
    -            self.ctx.engine.execute(acl_mapping.delete(
    -                (acl_mapping.c.priv_id == p.c.priv_id) &
    -
    -                (acl_mapping.c.subject_id == self.subject.subject_id) &
    -
    -                (acl_mapping.c.object_id == obj.object_id)
    -            ))
    -            return
    -        # touch privilege and check existing mapping
    -
    -        priv_id = self._fetch_privilege(privilege)
    -        r = self.ctx.engine.execute(meta.select([acl_mapping.c.state],
    -            (acl_mapping.c.priv_id == priv_id) &
    -
    -            (acl_mapping.c.subject_id == self.subject.subject_id) &
    -
    -            (acl_mapping.c.object_id == obj.object_id)
    -        ))
    -        row = r.fetchone()
    -        if row is not None:
    -            # this rule exists already
    -
    -            if row['state'] == state:
    -                return
    -            # goddamn, same rule - different state, delete old first
    -            self._set(privilege, obj, 0)
    -        # insert new rule
    -
    -        self.ctx.engine.execute(acl_mapping.insert(),
    -            priv_id = priv_id,
    -            subject_id = self.subject.subject_id,
    -            object_id = obj.object_id,
    -            state = state
    -
    -        )
    -
    -    def _bootstrap(self):
    -        """This method is automatically called when subject_id is
    -        None and a subject_id is required."""
    -        r = self.ctx.engine.execute(acl_subjects.insert(),
    -            subject_type = self._type
    -
    -        )
    -        self.subject.subject_id = r.last_inserted_ids()[0]
    -        self.subject.save()
    -
    -    def _bootstrap_object(self, obj):
    -        """Like _bootstrap but works for objects."""
    -
    -        objtype = self._get_object_type(obj)
    -        r = self.ctx.engine.execute(acl_objects.insert(),
    -            object_type = objtype
    -
    -        )
    -        obj.object_id = r.last_inserted_ids()[0]
    -        obj.save()
    -
    -    def _get_object_type(self, obj):
    -        if isinstance(obj, Forum):
    -            return 'forum'
    -
    -        elif isinstance(obj, Thread):
    -            return 'thread'
    -        elif obj is Site:
    -            return 'site'
    -
    -        raise TypeError('obj isn\'t a forum or thread')
    -
    -    def _get_object_join(self, obj):
    -        """Returns a subjoin for the object id."""
    -
    -        t = self._get_object_type(obj)
    -        if t == 'forum':
    -            return meta.select([forums.c.object_id],
    -                forums.c.forum_id == obj.forum_id
    -
    -            )
    -        elif t == 'thread':
    -            return meta.select([posts.c.object_id],
    -                posts.c.post_id == obj.post_id
    -
    -            )
    -        else:
    -            # XXX: it works ^^
    -            # i really want something like meta.select('0 as group_id')
    -            class Fake(object):
    -                def alias(self, n):
    -                    class _C(object):
    -                        class c(object):
    -                            object_id = 0
    -
    -                    return _C
    -            return Fake()
    -
    -    def _get_subject_join(self):
    -        """Returns a subjoin for the subject id."""
    -
    -        if self._type == 'user':
    -            return meta.select([users.c.subject_id],
    -                users.c.user_id == self.subject.user_id
    -
    -            )
    -        return meta.select([groups.c.subject_id],
    -            groups.c.group_id == self.subject.group_id
    -
    -        )
    -
    -    def _fetch_privilege(self, name):
    -        """Returns the priv_id for the given privilege. If it
    -        doesn\'t exist by now the system will create a new
    -        privilege."""
    -        r = self.ctx.engine.execute(meta.select([privileges.c.priv_id],
    -            privileges.c.name == name
    -
    -        ))
    -        row = r.fetchone()
    -        if row is not None:
    -            return row[0]
    -        r = self.ctx.engine.execute(privileges.insert(),
    -            name = name
    -
    -        )
    -        return r.last_inserted_ids()[0]
    -
    -    def __repr__(self):
    -        if self._type == 'user':
    -            id_ = self.subject.user_id
    -
    -        else:
    -            id_ = self.subject.group_id
    -        if self.subject.subject_id is None:
    -            return '<%s %s:%d inactive>' % (
    -                self.__class__.__name__,
    -                self._type,
    -                id_
    -
    -            )
    -        return '<%s %s:%d active as %d>' % (
    -            self.__class__.__name__,
    -            self._type,
    -            id_,
    -            self.subject.subject_id
    -
    -        )
    -
    diff --git a/tests/examplefiles/test.idr b/tests/examplefiles/test.idr
    deleted file mode 100644
    index fd008d31..00000000
    --- a/tests/examplefiles/test.idr
    +++ /dev/null
    @@ -1,101 +0,0 @@
    -module Main
    -
    -data Ty = TyInt | TyBool | TyFun Ty Ty
    -
    -interpTy : Ty -> Type
    -interpTy TyInt       = Int
    -interpTy TyBool      = Bool
    -interpTy (TyFun s t) = interpTy s -> interpTy t
    -
    -using (G : Vect n Ty)
    -
    -  data Env : Vect n Ty -> Type where
    -      Nil  : Env Nil
    -      (::) : interpTy a -> Env G -> Env (a :: G)
    -
    -  data HasType : (i : Fin n) -> Vect n Ty -> Ty -> Type where
    -      stop : HasType fZ (t :: G) t
    -      pop  : HasType k G t -> HasType (fS k) (u :: G) t
    -
    -  lookup : HasType i G t -> Env G -> interpTy t
    -  lookup stop    (x :: xs) = x
    -  lookup (pop k) (x :: xs) = lookup k xs
    -
    -  data Expr : Vect n Ty -> Ty -> Type where
    -      Var : HasType i G t -> Expr G t
    -      Val : (x : Int) -> Expr G TyInt
    -      Lam : Expr (a :: G) t -> Expr G (TyFun a t)
    -      App : Expr G (TyFun a t) -> Expr G a -> Expr G t
    -      Op  : (interpTy a -> interpTy b -> interpTy c) -> Expr G a -> Expr G b ->
    -            Expr G c
    -      If  : Expr G TyBool -> Expr G a -> Expr G a -> Expr G a
    -      Bind : Expr G a -> (interpTy a -> Expr G b) -> Expr G b
    -
    -  dsl expr
    -      lambda      = Lam
    -      variable    = Var
    -      index_first = stop
    -      index_next  = pop
    -
    -  (<$>) : |(f : Expr G (TyFun a t)) -> Expr G a -> Expr G t
    -  (<$>) = \f, a => App f a
    -
    -  pure : Expr G a -> Expr G a
    -  pure = id
    -
    -  syntax IF [x] THEN [t] ELSE [e] = If x t e
    -
    -  (==) : Expr G TyInt -> Expr G TyInt -> Expr G TyBool
    -  (==) = Op (==)
    -
    -  (<) : Expr G TyInt -> Expr G TyInt -> Expr G TyBool
    -  (<) = Op (<)
    -
    -  instance Num (Expr G TyInt) where
    -    (+) x y = Op (+) x y
    -    (-) x y = Op (-) x y
    -    (*) x y = Op (*) x y
    -
    -    abs x = IF (x < 0) THEN (-x) ELSE x
    -
    -    fromInteger = Val . fromInteger
    -
    -  ||| Evaluates an expression in the given context.
    -  interp : Env G -> {static} Expr G t -> interpTy t
    -  interp env (Var i)     = lookup i env
    -  interp env (Val x)     = x
    -  interp env (Lam sc)    = \x => interp (x :: env) sc
    -  interp env (App f s)   = (interp env f) (interp env s)
    -  interp env (Op op x y) = op (interp env x) (interp env y)
    -  interp env (If x t e)  = if (interp env x) then (interp env t) else (interp env e)
    -  interp env (Bind v f)  = interp env (f (interp env v))
    -
    -  eId : Expr G (TyFun TyInt TyInt)
    -  eId = expr (\x => x)
    -
    -  eTEST : Expr G (TyFun TyInt (TyFun TyInt TyInt))
    -  eTEST = expr (\x, y => y)
    -
    -  eAdd : Expr G (TyFun TyInt (TyFun TyInt TyInt))
    -  eAdd = expr (\x, y => Op (+) x y)
    -
    -  eDouble : Expr G (TyFun TyInt TyInt)
    -  eDouble = expr (\x => App (App eAdd x) (Var stop))
    -
    -  eFac : Expr G (TyFun TyInt TyInt)
    -  eFac = expr (\x => IF x == 0 THEN 1 ELSE [| eFac (x - 1) |] * x)
    -
    -testFac : Int
    -testFac = interp [] eFac 4
    -
    ---testFacTooBig : Int
    ---testFacTooBig = interp [] eFac 100000
    -
    - {-testFacTooBig2 : Int
    -testFacTooBig2 = interp [] eFac 1000
    --}
    -
    -main : IO ()
    -main = print testFac
    -
    -
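    The example files removed in this patch exist to exercise Pygments' lexers. As a rough
    sketch only (not the project's actual test harness), a file such as test.idr can be
    checked for clean lexing by picking a lexer from its filename and asserting that no
    Error tokens come back; the path and the helper name below are illustrative assumptions:

    # Hedged sketch: confirm an example file lexes without Error tokens.
    # The chosen path and the helper name are assumptions, not from this patch.
    from pygments.lexers import get_lexer_for_filename
    from pygments.token import Token

    def lexes_cleanly(path):
        with open(path, encoding='utf-8') as f:
            code = f.read()
        # get_lexer_for_filename picks the lexer from the file extension
        lexer = get_lexer_for_filename(path, stripnl=False)
        # every emitted token type must be something other than Token.Error
        return all(ttype != Token.Error for ttype, _ in lexer.get_tokens(code))

    print(lexes_cleanly('tests/examplefiles/test.idr'))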
    diff --git a/tests/examplefiles/test.ini b/tests/examplefiles/test.ini
    deleted file mode 100644
    index a447803d..00000000
    --- a/tests/examplefiles/test.ini
    +++ /dev/null
    @@ -1,10 +0,0 @@
    -[section]
    -
    -foo = bar
    -continued = foo
    -  baz
    -conttwo =
    -  foo
    -; comment
    -# comment
    -
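    For illustration only, a minimal sketch of how an example file such as the INI snippet
    above could be rendered with Pygments; the input path and the output filename are
    assumptions, not part of this patch:

    from pygments import highlight
    from pygments.lexers import IniLexer
    from pygments.formatters import HtmlFormatter

    # read the example file (path is an assumption for this sketch)
    with open('tests/examplefiles/test.ini', encoding='utf-8') as f:
        source = f.read()

    # produce a standalone HTML page with embedded CSS for the highlighted code
    html = highlight(source, IniLexer(), HtmlFormatter(full=True))

    # output filename is likewise an assumption
    with open('test.ini.html', 'w', encoding='utf-8') as f:
        f.write(html)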
    diff --git a/tests/examplefiles/test.java b/tests/examplefiles/test.java
    deleted file mode 100644
    index 64c08531..00000000
    --- a/tests/examplefiles/test.java
    +++ /dev/null
    @@ -1,653 +0,0 @@
    -/*
    - * Created on 13-Mar-2004
    - * Created by James Yeh
    - * Copyright (C) 2004, 2005, 2006 Aelitis, All Rights Reserved.
    - *
    - * This program is free software; you can redistribute it and/or
    - * modify it under the terms of the GNU General Public License
    - * as published by the Free Software Foundation; either version 2
    - * of the License, or (at your option) any later version.
    - * This program is distributed in the hope that it will be useful,
    - * but WITHOUT ANY WARRANTY; without even the implied warranty of
    - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    - * GNU General Public License for more details.
    - * You should have received a copy of the GNU General Public License
    - * along with this program; if not, write to the Free Software
    - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
    - * 
    - * AELITIS, SAS au capital de 46,603.30 euros
    - * 8 Allee Lenotre, La Grille Royale, 78600 Le Mesnil le Roi, France.
    - *
    - */
    -
    -package org.gudy.azureus2.platform.macosx;
    -
    -import org.gudy.azureus2.core3.logging.*;
    -import org.gudy.azureus2.core3.util.AEMonitor;
    -import org.gudy.azureus2.core3.util.Debug;
    -import org.gudy.azureus2.core3.util.SystemProperties;
    -import org.gudy.azureus2.platform.PlatformManager;
    -import org.gudy.azureus2.platform.PlatformManagerCapabilities;
    -import org.gudy.azureus2.platform.PlatformManagerListener;
    -import org.gudy.azureus2.platform.macosx.access.jnilib.OSXAccess;
    -
    -import org.gudy.azureus2.plugins.platform.PlatformManagerException;
    -
    -import java.io.BufferedReader;
    -import java.io.File;
    -import java.io.IOException;
    -import java.io.InputStreamReader;
    -import java.text.MessageFormat;
    -import java.util.HashSet;
    -
    -
    -/**
    - * Performs platform-specific operations with Mac OS X
    - *
    - * @author James Yeh
    - * @version 1.0 Initial Version
    - * @see PlatformManager
    - */
    -public class PlatformManagerImpl implements PlatformManager
    -{
    -    private static final LogIDs LOGID = LogIDs.CORE;
    -
    -    protected static PlatformManagerImpl singleton;
    -    protected static AEMonitor class_mon = new AEMonitor("PlatformManager");
    -
    -    private static final String USERDATA_PATH = new File(System.getProperty("user.home") + "/Library/Application Support/").getPath();
    -
    -    //T: PlatformManagerCapabilities
    -    private final HashSet capabilitySet = new HashSet();
    -
    -    /**
    -     * Gets the platform manager singleton, which was already initialized
    -     */
    -    public static PlatformManagerImpl getSingleton()
    -    {
    -        return singleton;
    -    }
    -
    -    /**
    -     * Tries to enable cocoa-java access and instantiates the singleton
    -     */
    -    static
    -    {
    -        initializeSingleton();
    -    }
    -
    -    /**
    -     * Instantiates the singleton
    -     */
    -    private static void initializeSingleton()
    -    {
    -        try
    -        {
    -            class_mon.enter();
    -            singleton = new PlatformManagerImpl();
    -        }
    -        catch (Throwable e)
    -        {
    -        	Logger.log(new LogEvent(LOGID, "Failed to initialize platform manager"
    -					+ " for Mac OS X", e));
    -        }
    -        finally
    -        {
    -            class_mon.exit();
    -        }
    -    }
    -
    -    /**
    -     * Creates a new PlatformManager and initializes its capabilities
    -     */
    -    public PlatformManagerImpl()
    -    {
    -        capabilitySet.add(PlatformManagerCapabilities.RecoverableFileDelete);
    -        capabilitySet.add(PlatformManagerCapabilities.ShowFileInBrowser);
    -        capabilitySet.add(PlatformManagerCapabilities.ShowPathInCommandLine);
    -        capabilitySet.add(PlatformManagerCapabilities.CreateCommandLineProcess);
    -        capabilitySet.add(PlatformManagerCapabilities.GetUserDataDirectory);
    -        capabilitySet.add(PlatformManagerCapabilities.UseNativeScripting);
    -        capabilitySet.add(PlatformManagerCapabilities.PlaySystemAlert);
    -        
    -        if (OSXAccess.isLoaded()) {
    -	        capabilitySet.add(PlatformManagerCapabilities.GetVersion);
    -        }
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public int getPlatformType()
    -    {
    -        return PT_MACOSX;
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public String getVersion() throws PlatformManagerException
    -    {
    -    	if (!OSXAccess.isLoaded()) {
    -        throw new PlatformManagerException("Unsupported capability called on platform manager");
    -    	}
    -    	
    -    	return OSXAccess.getVersion();
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     * @see org.gudy.azureus2.core3.util.SystemProperties#getUserPath()
    -     */
    -    public String getUserDataDirectory() throws PlatformManagerException
    -    {
    -        return USERDATA_PATH;
    -    }
    -
    -	public File
    -	getLocation(
    -		long	location_id )
    -	
    -		throws PlatformManagerException
    -	{
    -		if ( location_id == LOC_USER_DATA ){
    -			
    -			return( new File( USERDATA_PATH ));
    -		}
    -		
    -		return( null );
    -	}
    -    /**
    -     * Not implemented; returns True
    -     */
    -    public boolean isApplicationRegistered() throws PlatformManagerException
    -    {
    -        return true;
    -    }
    -
    -    
    -	public String
    -	getApplicationCommandLine()
    -		throws PlatformManagerException
    -	{
    -		try{	    
    -			String	bundle_path = System.getProperty("user.dir") +SystemProperties.SEP+ SystemProperties.getApplicationName() + ".app";
    -
    -			File osx_app_bundle = new File( bundle_path ).getAbsoluteFile();
    -			
    -			if( !osx_app_bundle.exists() ) {
    -				String msg = "OSX app bundle not found: [" +osx_app_bundle.toString()+ "]";
    -				System.out.println( msg );
    -				if (Logger.isEnabled())
    -					Logger.log(new LogEvent(LOGID, msg));		
    -				throw new PlatformManagerException( msg );
    -			}
    -			
    -			return "open -a \"" +osx_app_bundle.toString()+ "\"";
    -			//return osx_app_bundle.toString() +"/Contents/MacOS/JavaApplicationStub";
    -			
    -		}
    -		catch( Throwable t ){	
    -			t.printStackTrace();
    -			return null;
    -		}
    -	}
    -	
    -	
    -	public boolean
    -	isAdditionalFileTypeRegistered(
    -		String		name,				// e.g. "BitTorrent"
    -		String		type )				// e.g. ".torrent"
    -	
    -		throws PlatformManagerException
    -	{
    -	    throw new PlatformManagerException("Unsupported capability called on platform manager");
    -	}
    -	
    -	public void
    -	unregisterAdditionalFileType(
    -		String		name,				// e.g. "BitTorrent"
    -		String		type )				// e.g. ".torrent"
    -		
    -		throws PlatformManagerException
    -	{
    -		throw new PlatformManagerException("Unsupported capability called on platform manager");
    -	}
    -	
    -	public void
    -	registerAdditionalFileType(
    -		String		name,				// e.g. "BitTorrent"
    -		String		description,		// e.g. "BitTorrent File"
    -		String		type,				// e.g. ".torrent"
    -		String		content_type )		// e.g. "application/x-bittorrent"
    -	
    -		throws PlatformManagerException
    -	{
    -	   throw new PlatformManagerException("Unsupported capability called on platform manager");
    -	}
    -	
    -    /**
    -     * Not implemented; does nothing
    -     */
    -    public void registerApplication() throws PlatformManagerException
    -    {
    -        // handled by LaunchServices and/or user interaction
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public void createProcess(String cmd, boolean inheritsHandles) throws PlatformManagerException
    -    {
    -        try
    -        {
    -            performRuntimeExec(cmd.split(" "));
    -        }
    -        catch (Throwable e)
    -        {
    -            throw new PlatformManagerException("Failed to create process", e);
    -        }
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public void performRecoverableFileDelete(String path) throws PlatformManagerException
    -    {
    -        File file = new File(path);
    -        if(!file.exists())
    -        {
    -	        	if (Logger.isEnabled())
    -							Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, "Cannot find "
    -									+ file.getName()));
    -            return;
    -        }
    -
    -        boolean useOSA = !NativeInvocationBridge.sharedInstance().isEnabled() || !NativeInvocationBridge.sharedInstance().performRecoverableFileDelete(file);
    -
    -        if(useOSA)
    -        {
    -            try
    -            {
    -                StringBuffer sb = new StringBuffer();
    -                sb.append("tell application \"");
    -                sb.append("Finder");
    -                sb.append("\" to move (posix file \"");
    -                sb.append(path);
    -                sb.append("\" as alias) to the trash");
    -
    -                performOSAScript(sb);
    -            }
    -            catch (Throwable e)
    -            {
    -                throw new PlatformManagerException("Failed to move file", e);
    -            }
    -        }
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public boolean hasCapability(PlatformManagerCapabilities capability)
    -    {
    -        return capabilitySet.contains(capability);
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public void dispose()
    -    {
    -        NativeInvocationBridge.sharedInstance().dispose();
    -    }
    -
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public void setTCPTOSEnabled(boolean enabled) throws PlatformManagerException
    -    {
    -        throw new PlatformManagerException("Unsupported capability called on platform manager");
    -    }
    -
    -	public void
    -    copyFilePermissions(
    -		String	from_file_name,
    -		String	to_file_name )
    -	
    -		throws PlatformManagerException
    -	{
    -	    throw new PlatformManagerException("Unsupported capability called on platform manager");		
    -	}
    -	
    -    /**
    -     * {@inheritDoc}
    -     */
    -    public void showFile(String path) throws PlatformManagerException
    -    {
    -        File file = new File(path);
    -        if(!file.exists())
    -        {
    -        	if (Logger.isEnabled())
    -        		Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, "Cannot find "
    -        				+ file.getName()));
    -            throw new PlatformManagerException("File not found");
    -        }
    -
    -        showInFinder(file);
    -    }
    -
    -    // Public utility methods not shared across the interface
    -
    -    /**
    -     * Plays the system alert (the jingle is specified by the user in System Preferences)
    -     */
    -    public void playSystemAlert()
    -    {
    -        try
    -        {
    -            performRuntimeExec(new String[]{"beep"});
    -        }
    -        catch (IOException e)
    -        {
    -        	if (Logger.isEnabled())
    -        		Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING,
    -						"Cannot play system alert"));
    -        	Logger.log(new LogEvent(LOGID, "", e));
    -        }
    -    }
    -
    -    /**
    -     * Shows the given file or directory in Finder
    - * @param path Absolute path to the file or directory - */ - public void showInFinder(File path) - { - boolean useOSA = !NativeInvocationBridge.sharedInstance().isEnabled() || !NativeInvocationBridge.sharedInstance().showInFinder(path); - - if(useOSA) - { - StringBuffer sb = new StringBuffer(); - sb.append("tell application \""); - sb.append(getFileBrowserName()); - sb.append("\" to reveal (posix file \""); - sb.append(path); - sb.append("\" as alias)"); - - try - { - performOSAScript(sb); - } - catch (IOException e) - { - Logger.log(new LogAlert(LogAlert.UNREPEATABLE, LogAlert.AT_ERROR, e - .getMessage())); - } - } - } - - /** - *

    Shows the given file or directory in Terminal by executing cd /absolute/path/to

    - * @param path Absolute path to the file or directory - */ - public void showInTerminal(String path) - { - showInTerminal(new File(path)); - } - - /** - *

    Shows the given file or directory in Terminal by executing cd /absolute/path/to

    - * @param path Absolute path to the file or directory - */ - public void showInTerminal(File path) - { - if (path.isFile()) - { - path = path.getParentFile(); - } - - if (path != null && path.isDirectory()) - { - StringBuffer sb = new StringBuffer(); - sb.append("tell application \""); - sb.append("Terminal"); - sb.append("\" to do script \"cd "); - sb.append(path.getAbsolutePath().replaceAll(" ", "\\ ")); - sb.append("\""); - - try - { - performOSAScript(sb); - } - catch (IOException e) - { - Logger.log(new LogAlert(LogAlert.UNREPEATABLE, LogAlert.AT_ERROR, e - .getMessage())); - } - } - else - { - if (Logger.isEnabled()) - Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, "Cannot find " - + path.getName())); - } - } - - // Internal utility methods - - /** - * Compiles a new AppleScript instance and runs it - * @param cmd AppleScript command to execute; do not surround command with extra quotation marks - * @return Output of the script - * @throws IOException If the script failed to execute - */ - protected static String performOSAScript(CharSequence cmd) throws IOException - { - return performOSAScript(new CharSequence[]{cmd}); - } - - /** - * Compiles a new AppleScript instance and runs it - * @param cmds AppleScript Sequence of commands to execute; do not surround command with extra quotation marks - * @return Output of the script - * @throws IOException If the script failed to execute - */ - protected static String performOSAScript(CharSequence[] cmds) throws IOException - { - long start = System.currentTimeMillis(); - Debug.outNoStack("Executing OSAScript: "); - for (int i = 0; i < cmds.length; i++) - { - Debug.outNoStack("\t" + cmds[i]); - } - - String[] cmdargs = new String[2 * cmds.length + 1]; - cmdargs[0] = "osascript"; - for (int i = 0; i < cmds.length; i++) - { - cmdargs[i * 2 + 1] = "-e"; - cmdargs[i * 2 + 2] = String.valueOf(cmds[i]); - } - - Process osaProcess = performRuntimeExec(cmdargs); - BufferedReader reader = new BufferedReader(new InputStreamReader(osaProcess.getInputStream())); - String line = reader.readLine(); - reader.close(); - Debug.outNoStack("OSAScript Output: " + line); - - reader = new BufferedReader(new InputStreamReader(osaProcess.getErrorStream())); - String errorMsg = reader.readLine(); - reader.close(); - - Debug.outNoStack("OSAScript Error (if any): " + errorMsg); - - Debug.outNoStack(MessageFormat.format("OSAScript execution ended ({0}ms)", new Object[]{String.valueOf(System.currentTimeMillis() - start)})); - - if (errorMsg != null) - { - throw new IOException(errorMsg); - } - - return line; - } - - /** - * Compiles a new AppleScript instance and runs it - * @param script AppleScript file (.scpt) to execute - * @return Output of the script - * @throws IOException If the script failed to execute - */ - protected static String performOSAScript(File script) throws IOException - { - long start = System.currentTimeMillis(); - Debug.outNoStack("Executing OSAScript from file: " + script.getPath()); - - Process osaProcess = performRuntimeExec(new String[]{"osascript", script.getPath()}); - BufferedReader reader = new BufferedReader(new InputStreamReader(osaProcess.getInputStream())); - String line = reader.readLine(); - reader.close(); - Debug.outNoStack("OSAScript Output: " + line); - - reader = new BufferedReader(new InputStreamReader(osaProcess.getErrorStream())); - String errorMsg = reader.readLine(); - reader.close(); - - Debug.outNoStack("OSAScript Error (if any): " + errorMsg); - - Debug.outNoStack(MessageFormat.format("OSAScript execution 
ended ({0}ms)", new Object[]{String.valueOf(System.currentTimeMillis() - start)})); - - if (errorMsg != null) - { - throw new IOException(errorMsg); - } - - return line; - } - - /** - * Compiles a new AppleScript instance to the specified location - * @param cmd Command to compile; do not surround command with extra quotation marks - * @param destination Destination location of the AppleScript file - * @return True if compiled successfully - */ - protected static boolean compileOSAScript(CharSequence cmd, File destination) - { - return compileOSAScript(new CharSequence[]{cmd}, destination); - } - - /** - * Compiles a new AppleScript instance to the specified location - * @param cmds Sequence of commands to compile; do not surround command with extra quotation marks - * @param destination Destination location of the AppleScript file - * @return True if compiled successfully - */ - protected static boolean compileOSAScript(CharSequence[] cmds, File destination) - { - long start = System.currentTimeMillis(); - Debug.outNoStack("Compiling OSAScript: " + destination.getPath()); - for (int i = 0; i < cmds.length; i++) - { - Debug.outNoStack("\t" + cmds[i]); - } - - String[] cmdargs = new String[2 * cmds.length + 3]; - cmdargs[0] = "osacompile"; - for (int i = 0; i < cmds.length; i++) - { - cmdargs[i * 2 + 1] = "-e"; - cmdargs[i * 2 + 2] = String.valueOf(cmds[i]); - } - - cmdargs[cmdargs.length - 2] = "-o"; - cmdargs[cmdargs.length - 1] = destination.getPath(); - - String errorMsg; - try - { - Process osaProcess = performRuntimeExec(cmdargs); - - BufferedReader reader = new BufferedReader(new InputStreamReader(osaProcess.getErrorStream())); - errorMsg = reader.readLine(); - reader.close(); - } - catch (IOException e) - { - Debug.outNoStack("OSACompile Execution Failed: " + e.getMessage()); - Debug.printStackTrace(e); - return false; - } - - Debug.outNoStack("OSACompile Error (if any): " + errorMsg); - - Debug.outNoStack(MessageFormat.format("OSACompile execution ended ({0}ms)", new Object[]{String.valueOf(System.currentTimeMillis() - start)})); - - return (errorMsg == null); - } - - /** - * @see Runtime#exec(String[]) - */ - protected static Process performRuntimeExec(String[] cmdargs) throws IOException - { - try - { - return Runtime.getRuntime().exec(cmdargs); - } - catch (IOException e) - { - Logger.log(new LogAlert(LogAlert.UNREPEATABLE, e.getMessage(), e)); - throw e; - } - } - - /** - *

    Gets the preferred file browser name
    -     *
    -     * Currently supported browsers are Path Finder and Finder. If Path Finder is currently running
    -     * (not just installed), then "Path Finder" is returned; else, "Finder" is returned.
    - * @return "Path Finder" if it is currently running; else "Finder" - */ - private static String getFileBrowserName() - { - try - { - // slowwwwwwww - if ("true".equalsIgnoreCase(performOSAScript("tell application \"System Events\" to exists process \"Path Finder\""))) - { - Debug.outNoStack("Path Finder is running"); - - return "Path Finder"; - } - else - { - return "Finder"; - } - } - catch (IOException e) - { - Debug.printStackTrace(e); - Logger.log(new LogEvent(LOGID, e.getMessage(), e)); - - return "Finder"; - } - } - - public boolean - testNativeAvailability( - String name ) - - throws PlatformManagerException - { - throw new PlatformManagerException("Unsupported capability called on platform manager"); - } - - public void - addListener( - PlatformManagerListener listener ) - { - } - - public void - removeListener( - PlatformManagerListener listener ) - { - } -} diff --git a/tests/examplefiles/test.jsp b/tests/examplefiles/test.jsp deleted file mode 100644 index 1c6664da..00000000 --- a/tests/examplefiles/test.jsp +++ /dev/null @@ -1,24 +0,0 @@ - -<%= var x = 1; -%> -<%! int i = 0; %> -<%! int a, b, c; %> -<%! Circle a = new Circle(2.0); %> - -<% - String name = null; - if (request.getParameter("name") == null) { -%> -<%@ include file="error.html" %> -<% - } else { - foo.setName(request.getParameter("name")); - if (foo.getName().equalsIgnoreCase("integra")) - name = "acura"; - if (name.equalsIgnoreCase( "acura" )) { -%> - - -

    -Calendar of -

    diff --git a/tests/examplefiles/test.lean b/tests/examplefiles/test.lean deleted file mode 100644 index a7b7e261..00000000 --- a/tests/examplefiles/test.lean +++ /dev/null @@ -1,217 +0,0 @@ -/- -Theorems/Exercises from "Logical Investigations, with the Nuprl Proof Assistant" -by Robert L. Constable and Anne Trostle -http://www.nuprl.org/MathLibrary/LogicalInvestigations/ --/ -import logic - --- 2. The Minimal Implicational Calculus -theorem thm1 {A B : Prop} : A → B → A := -assume Ha Hb, Ha - -theorem thm2 {A B C : Prop} : (A → B) → (A → B → C) → (A → C) := -assume Hab Habc Ha, - Habc Ha (Hab Ha) - -theorem thm3 {A B C : Prop} : (A → B) → (B → C) → (A → C) := -assume Hab Hbc Ha, - Hbc (Hab Ha) - --- 3. False Propositions and Negation -theorem thm4 {P Q : Prop} : ¬P → P → Q := -assume Hnp Hp, - absurd Hp Hnp - -theorem thm5 {P : Prop} : P → ¬¬P := -assume (Hp : P) (HnP : ¬P), - absurd Hp HnP - -theorem thm6 {P Q : Prop} : (P → Q) → (¬Q → ¬P) := -assume (Hpq : P → Q) (Hnq : ¬Q) (Hp : P), - have Hq : Q, from Hpq Hp, - show false, from absurd Hq Hnq - -theorem thm7 {P Q : Prop} : (P → ¬P) → (P → Q) := -assume Hpnp Hp, - absurd Hp (Hpnp Hp) - -theorem thm8 {P Q : Prop} : ¬(P → Q) → (P → ¬Q) := -assume (Hn : ¬(P → Q)) (Hp : P) (Hq : Q), - -- Rermak we don't even need the hypothesis Hp - have H : P → Q, from assume H', Hq, - absurd H Hn - --- 4. Conjunction and Disjunction -theorem thm9 {P : Prop} : (P ∨ ¬P) → (¬¬P → P) := -assume (em : P ∨ ¬P) (Hnn : ¬¬P), - or_elim em - (assume Hp, Hp) - (assume Hn, absurd Hn Hnn) - -theorem thm10 {P : Prop} : ¬¬(P ∨ ¬P) := -assume Hnem : ¬(P ∨ ¬P), - have Hnp : ¬P, from - assume Hp : P, - have Hem : P ∨ ¬P, from or_inl Hp, - absurd Hem Hnem, - have Hem : P ∨ ¬P, from or_inr Hnp, - absurd Hem Hnem - -theorem thm11 {P Q : Prop} : ¬P ∨ ¬Q → ¬(P ∧ Q) := -assume (H : ¬P ∨ ¬Q) (Hn : P ∧ Q), - or_elim H - (assume Hnp : ¬P, absurd (and_elim_left Hn) Hnp) - (assume Hnq : ¬Q, absurd (and_elim_right Hn) Hnq) - -theorem thm12 {P Q : Prop} : ¬(P ∨ Q) → ¬P ∧ ¬Q := -assume H : ¬(P ∨ Q), - have Hnp : ¬P, from assume Hp : P, absurd (or_inl Hp) H, - have Hnq : ¬Q, from assume Hq : Q, absurd (or_inr Hq) H, - and_intro Hnp Hnq - -theorem thm13 {P Q : Prop} : ¬P ∧ ¬Q → ¬(P ∨ Q) := -assume (H : ¬P ∧ ¬Q) (Hn : P ∨ Q), - or_elim Hn - (assume Hp : P, absurd Hp (and_elim_left H)) - (assume Hq : Q, absurd Hq (and_elim_right H)) - -theorem thm14 {P Q : Prop} : ¬P ∨ Q → P → Q := -assume (Hor : ¬P ∨ Q) (Hp : P), - or_elim Hor - (assume Hnp : ¬P, absurd Hp Hnp) - (assume Hq : Q, Hq) - -theorem thm15 {P Q : Prop} : (P → Q) → ¬¬(¬P ∨ Q) := -assume (Hpq : P → Q) (Hn : ¬(¬P ∨ Q)), - have H1 : ¬¬P ∧ ¬Q, from thm12 Hn, - have Hnp : ¬P, from mt Hpq (and_elim_right H1), - absurd Hnp (and_elim_left H1) - -theorem thm16 {P Q : Prop} : (P → Q) ∧ ((P ∨ ¬P) ∨ (Q ∨ ¬Q)) → ¬P ∨ Q := -assume H : (P → Q) ∧ ((P ∨ ¬P) ∨ (Q ∨ ¬Q)), - have Hpq : P → Q, from and_elim_left H, - or_elim (and_elim_right H) - (assume Hem1 : P ∨ ¬P, or_elim Hem1 - (assume Hp : P, or_inr (Hpq Hp)) - (assume Hnp : ¬P, or_inl Hnp)) - (assume Hem2 : Q ∨ ¬Q, or_elim Hem2 - (assume Hq : Q, or_inr Hq) - (assume Hnq : ¬Q, or_inl (mt Hpq Hnq))) - --- 5. 
First-Order Logic: All and Exists -section -parameters {T : Type} {C : Prop} {P : T → Prop} -theorem thm17a : (C → ∀x, P x) → (∀x, C → P x) := -assume H : C → ∀x, P x, - take x : T, assume Hc : C, - H Hc x - -theorem thm17b : (∀x, C → P x) → (C → ∀x, P x) := -assume (H : ∀x, C → P x) (Hc : C), - take x : T, - H x Hc - -theorem thm18a : ((∃x, P x) → C) → (∀x, P x → C) := -assume H : (∃x, P x) → C, - take x, assume Hp : P x, - have Hex : ∃x, P x, from exists_intro x Hp, - H Hex - -theorem thm18b : (∀x, P x → C) → (∃x, P x) → C := -assume (H1 : ∀x, P x → C) (H2 : ∃x, P x), - obtain (w : T) (Hw : P w), from H2, - H1 w Hw - -theorem thm19a : (C ∨ ¬C) → (∃x : T, true) → (C → (∃x, P x)) → (∃x, C → P x) := -assume (Hem : C ∨ ¬C) (Hin : ∃x : T, true) (H1 : C → ∃x, P x), - or_elim Hem - (assume Hc : C, - obtain (w : T) (Hw : P w), from H1 Hc, - have Hr : C → P w, from assume Hc, Hw, - exists_intro w Hr) - (assume Hnc : ¬C, - obtain (w : T) (Hw : true), from Hin, - have Hr : C → P w, from assume Hc, absurd Hc Hnc, - exists_intro w Hr) - -theorem thm19b : (∃x, C → P x) → C → (∃x, P x) := -assume (H : ∃x, C → P x) (Hc : C), - obtain (w : T) (Hw : C → P w), from H, - exists_intro w (Hw Hc) - -theorem thm20a : (C ∨ ¬C) → (∃x : T, true) → ((¬∀x, P x) → ∃x, ¬P x) → ((∀x, P x) → C) → (∃x, P x → C) := -assume Hem Hin Hnf H, - or_elim Hem - (assume Hc : C, - obtain (w : T) (Hw : true), from Hin, - exists_intro w (assume H : P w, Hc)) - (assume Hnc : ¬C, - have H1 : ¬(∀x, P x), from mt H Hnc, - have H2 : ∃x, ¬P x, from Hnf H1, - obtain (w : T) (Hw : ¬P w), from H2, - exists_intro w (assume H : P w, absurd H Hw)) - -theorem thm20b : (∃x, P x → C) → (∀ x, P x) → C := -assume Hex Hall, - obtain (w : T) (Hw : P w → C), from Hex, - Hw (Hall w) - -theorem thm21a : (∃x : T, true) → ((∃x, P x) ∨ C) → (∃x, P x ∨ C) := -assume Hin H, - or_elim H - (assume Hex : ∃x, P x, - obtain (w : T) (Hw : P w), from Hex, - exists_intro w (or_inl Hw)) - (assume Hc : C, - obtain (w : T) (Hw : true), from Hin, - exists_intro w (or_inr Hc)) - -theorem thm21b : (∃x, P x ∨ C) → ((∃x, P x) ∨ C) := -assume H, - obtain (w : T) (Hw : P w ∨ C), from H, - or_elim Hw - (assume H : P w, or_inl (exists_intro w H)) - (assume Hc : C, or_inr Hc) - -theorem thm22a : (∀x, P x) ∨ C → ∀x, P x ∨ C := -assume H, take x, - or_elim H - (assume Hl, or_inl (Hl x)) - (assume Hr, or_inr Hr) - -theorem thm22b : (C ∨ ¬C) → (∀x, P x ∨ C) → ((∀x, P x) ∨ C) := -assume Hem H1, - or_elim Hem - (assume Hc : C, or_inr Hc) - (assume Hnc : ¬C, - have Hx : ∀x, P x, from - take x, - have H1 : P x ∨ C, from H1 x, - resolve_left H1 Hnc, - or_inl Hx) - -theorem thm23a : (∃x, P x) ∧ C → (∃x, P x ∧ C) := -assume H, - have Hex : ∃x, P x, from and_elim_left H, - have Hc : C, from and_elim_right H, - obtain (w : T) (Hw : P w), from Hex, - exists_intro w (and_intro Hw Hc) - -theorem thm23b : (∃x, P x ∧ C) → (∃x, P x) ∧ C := -assume H, - obtain (w : T) (Hw : P w ∧ C), from H, - have Hex : ∃x, P x, from exists_intro w (and_elim_left Hw), - and_intro Hex (and_elim_right Hw) - -theorem thm24a : (∀x, P x) ∧ C → (∀x, P x ∧ C) := -assume H, take x, - and_intro (and_elim_left H x) (and_elim_right H) - -theorem thm24b : (∃x : T, true) → (∀x, P x ∧ C) → (∀x, P x) ∧ C := -assume Hin H, - obtain (w : T) (Hw : true), from Hin, - have Hc : C, from and_elim_right (H w), - have Hx : ∀x, P x, from take x, and_elim_left (H x), - and_intro Hx Hc - -end -- of section diff --git a/tests/examplefiles/test.maql b/tests/examplefiles/test.maql deleted file mode 100644 index a44935fd..00000000 --- 
a/tests/examplefiles/test.maql +++ /dev/null @@ -1,45 +0,0 @@ -# MAQL script -CREATE DATASET {dataset.quotes} VISUAL(TITLE "St\\tock Qu\totes Data"); - -# A comment -CREATE DATASET {dataset.quotes} VISUAL(TITLE "Stock Qu\"otes Data"); - -CREATE DATASET {dataset.quotes} VISUAL(TITLE "Stock Quotes Data"); - -ALTER DATASET {dataset.quotes} ADD {attribute.sector}; - -ALTER DATASET {dataset.quotes} DROP {attribute.symbol}; - -ALTER DATASET {dataset.quotes} VISUAL(TITLE "Internal Quotes Data"); - -CREATE ATTRIBUTE {attr.quotes.symbol} - VISUAL(TITLE "Symbol", FOLDER {folder.quotes.attr}) - AS {d_quotes_symbol.nm_symbol}; - -ALTER ATTRIBUTE {attr.quotes.symbol} - ADD LABELS {attr.quotes.company} VISUAL(TITLE "Company") - AS {d_quotes_symbol.nm_company}; - -CREATE FACT {fact.quotes.open_price} - VISUAL( TITLE "Open Price", FOLDER {folder.quotes.fact}) AS {f_quotes.f_open_price}; - -ALTER FACT {fact.quotes.open_price} ADD {f_quotes2.f_open_price}; - -CREATE FOLDER {folder.quotes.attr} - VISUAL ( TITLE "Stock Quotes Data", - DESCRIPTION "Stock quotes data obtained from John Doe etc." ) - TYPE ATTRIBUTE; - -ALTER DATATYPE {d_quotes_symbol.nm_symbol} VARCHAR(4), - {d_quotes_symbol.nm_symbol} VARCHAR(80), - {f_quotes.f_open_price} DECIMAL(10,2); - -INCLUDE TEMPLATE "URN:GOODDATA:DATE" MODIFY (IDENTIFIER "my-date", TITLE "quote") - -ALTER ATTRIBUTE {attr.quotes.symbol} ADD KEYS {d_quotes_symbol.nm_symbol} PRIMARY; - -ALTER ATTRIBUTE {attr.quotes.symbol} DROP KEYS {d_quotes_symbol.nm_symbol}; - -ALTER FACT {fact.quotes.open_price} ADD {f_quotes2.f_open_price}; - -# Another comment diff --git a/tests/examplefiles/test.mask b/tests/examplefiles/test.mask deleted file mode 100644 index 39134d74..00000000 --- a/tests/examplefiles/test.mask +++ /dev/null @@ -1,41 +0,0 @@ - -// comment -h4.class-1#id.class-2.other checked='true' disabled name = x param > 'Enter ..' -input placeholder=Password type=password > - :dualbind x-signal='dom:create' value=user.passord; -% each='flowers' > - div style=' - position: absolute; - display: inline-block; - background: url("image.png") center center no-repeat; - '; -#skippedDiv.other { - img src='~[url]'; - div style="text-align:center;" { - '~[: $obj.foo("username", name) + 2]' - "~[Localize: stringId]" - } - - p > """ - - Hello "world" - """ - - p > ' - Hello "world" - ' - - p > "Hello 'world'" - - :customComponent x-value='tt'; - /* footer > '(c) 2014' */ -} - -.skippedDiv > - span > - #skipped > - table > - td > - tr > ';)' - -br; \ No newline at end of file diff --git a/tests/examplefiles/test.mod b/tests/examplefiles/test.mod deleted file mode 100644 index ba972e30..00000000 --- a/tests/examplefiles/test.mod +++ /dev/null @@ -1,374 +0,0 @@ -(* LIFO Storage Library - * - * @file LIFO.mod - * LIFO implementation - * - * Universal Dynamic Stack - * - * Author: Benjamin Kowarsch - * - * Copyright (C) 2009 Benjamin Kowarsch. All rights reserved. - * - * License: - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met - * - * 1) NO FEES may be charged for the provision of the software. The software - * may NOT be published on websites that contain advertising, unless - * specific prior written permission has been obtained. - * - * 2) Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. 
- * - * 3) Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and other materials provided with the distribution. - * - * 4) Neither the author's name nor the names of any contributors may be used - * to endorse or promote products derived from this software without - * specific prior written permission. - * - * 5) Where this list of conditions or the following disclaimer, in part or - * as a whole is overruled or nullified by applicable law, no permission - * is granted to use the software. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * - *) - - -IMPLEMENTATION (* OF *) MODULE LIFO; - -FROM SYSTEM IMPORT ADDRESS, ADR, TSIZE; -FROM Storage IMPORT ALLOCATE, DEALLOCATE; - - -(* --------------------------------------------------------------------------- -// Private type : ListEntry -// --------------------------------------------------------------------------- -*) -TYPE ListPtr = POINTER TO ListEntry; - -TYPE ListEntry = RECORD - value : DataPtr; - next : ListPtr -END; (* ListEntry *) - - -(* --------------------------------------------------------------------------- -// Opaque type : LIFO.Stack -// --------------------------------------------------------------------------- -// CAUTION: Modula-2 does not support the use of variable length array fields -// in records. VLAs can only be implemented using pointer arithmetic which -// means there is no type checking and no boundary checking on the array. -// It also means that array notation cannot be used on the array which makes -// the code difficult to read and maintain. As a result, Modula-2 is less -// safe and less readable than C when it comes to using VLAs. Great care must -// be taken to make sure that the code accessing VLA fields is safe. Boundary -// checks must be inserted manually. Size checks must be inserted manually to -// compensate for the absence of type checks. *) - -TYPE Stack = POINTER TO StackDescriptor; - -TYPE StackDescriptor = RECORD - overflow : ListPtr; - entryCount : StackSize; - arraySize : StackSize; - array : ADDRESS (* ARRAY OF DataPtr *) -END; (* StackDescriptor *) - - -(* --------------------------------------------------------------------------- -// function: LIFO.new( initial_size, status ) -// --------------------------------------------------------------------------- -// -// Creates and returns a new LIFO stack object with an initial capacity of -// . If zero is passed in for , then the stack -// will be created with an initial capacity of LIFO.defaultStackSize. The -// function fails if a value greater than LIFO.maximumStackSize is passed -// in for or if memory could not be allocated. 
-// -// The initial capacity of a stack is the number of entries that can be stored -// in the stack without enlargement. -// -// The status of the operation is passed back in . *) - -PROCEDURE new ( initialSize : StackSize; VAR status : Status ) : Stack; - -VAR - newStack : Stack; - -BEGIN - - (* zero size means default *) - IF initialSize = 0 THEN - initialSize := defaultStackSize; - END; (* IF *) - - (* bail out if initial size is too high *) - IF initialSize > maximumStackSize THEN - status := invalidSize; - RETURN NIL; - END; (* IF *) - - (* allocate new stack object *) - ALLOCATE(newStack, TSIZE(Stack) + TSIZE(DataPtr) * (initialSize - 1)); - - (* bail out if allocation failed *) - IF newStack = NIL THEN - status := allocationFailed; - RETURN NIL; - END; (* IF *) - - (* initialise meta data *) - newStack^.arraySize := initialSize; - newStack^.entryCount := 0; - newStack^.overflow := NIL; - - (* pass status and new stack to caller *) - status := success; - RETURN newStack - -END new; - - -(* --------------------------------------------------------------------------- -// function: LIFO.push( stack, value, status ) -// --------------------------------------------------------------------------- -// -// Adds a new entry to the top of stack . The new entry is -// added by reference, no data is copied. However, no entry is added if the -// the stack is full, that is when the number of entries stored in the stack -// has reached LIFO.maximumStackSize. The function fails if NIL is passed in -// for or , or if memory could not be allocated. -// -// New entries are allocated dynamically if the number of entries exceeds the -// initial capacity of the stack. -// -// The status of the operation is passed back in . *) - -PROCEDURE push ( VAR stack : Stack; value : DataPtr; VAR status : Status ); -VAR - newEntry : ListPtr; - valuePtr : POINTER TO DataPtr; - -BEGIN - - (* bail out if stack is NIL *) - IF stack = NIL THEN - status := invalidStack; - RETURN; - END; (* IF *) - - (* bail out if value is NIL *) - IF value = NIL THEN - status := invalidData; - RETURN; - END; (* IF *) - - (* bail out if stack is full *) - IF stack^.entryCount >= maximumStackSize THEN - status := stackFull; - RETURN; - END; (* IF *) - - (* check if index falls within array segment *) - IF stack^.entryCount < stack^.arraySize THEN - - (* store value in array segment *) - - (* stack^.array^[stack^.entryCount] := value; *) - valuePtr := ADR(stack^.array) + TSIZE(DataPtr) * stack^.entryCount; - valuePtr^ := value; - - ELSE (* index falls within overflow segment *) - - (* allocate new entry slot *) - NEW(newEntry); - - (* bail out if allocation failed *) - IF newEntry = NIL THEN - status := allocationFailed; - RETURN; - END; (* IF *) - - (* initialise new entry *) - newEntry^.value := value; - - (* link new entry into overflow list *) - newEntry^.next := stack^.overflow; - stack^.overflow := newEntry; - - END; (* IF *) - - (* update entry counter *) - INC(stack^.entryCount); - - (* pass status to caller *) - status := success; - RETURN - -END push; - - -(* --------------------------------------------------------------------------- -// function: LIFO.pop( stack, status ) -// --------------------------------------------------------------------------- -// -// Removes the top most value from stack and returns it. If the stack -// is empty, that is when the number of entries stored in the stack has -// reached zero, then NIL is returned. 
-// -// Entries which were allocated dynamically (above the initial capacity) are -// deallocated when their values are popped. -// -// The status of the operation is passed back in . *) - -PROCEDURE pop ( VAR stack : Stack; VAR status : Status ) : DataPtr; - -VAR - thisValue : DataPtr; - thisEntry : ListPtr; - valuePtr : POINTER TO DataPtr; - -BEGIN - - (* bail out if stack is NIL *) - IF stack = NIL THEN - status := invalidStack; - RETURN NIL; - END; (* IF *) - - (* bail out if stack is empty *) - IF stack^.entryCount = 0 THEN - status := stackEmpty; - RETURN NIL; - END; (* IF *) - - DEC(stack^.entryCount); - - (* check if index falls within array segment *) - IF stack^.entryCount < stack^.arraySize THEN - - (* obtain value at index entryCount in array segment *) - - (* thisValue := stack^.array^[stack^.entryCount]; *) - valuePtr := ADR(stack^.array) + TSIZE(DataPtr) * stack^.entryCount; - thisValue := valuePtr^; - - ELSE (* index falls within overflow segment *) - - (* obtain value of first entry in overflow list *) - thisValue := stack^.overflow^.value; - - (* isolate first entry in overflow list *) - thisEntry := stack^.overflow; - stack^.overflow := stack^.overflow^.next; - - (* remove the entry from overflow list *) - DISPOSE(thisEntry); - - END; (* IF *) - - (* return value and status to caller *) - status := success; - RETURN thisValue - -END pop; - - -(* --------------------------------------------------------------------------- -// function: LIFO.stackSize( stack ) -// --------------------------------------------------------------------------- -// -// Returns the current capacity of . The current capacity is the total -// number of allocated entries. Returns zero if NIL is passed in for . -*) -PROCEDURE stackSize( VAR stack : Stack ) : StackSize; - -BEGIN - - (* bail out if stack is NIL *) - IF stack = NIL THEN - RETURN 0; - END; (* IF *) - - IF stack^.entryCount < stack^.arraySize THEN - RETURN stack^.arraySize; - ELSE - RETURN stack^.entryCount; - END; (* IF *) - -END stackSize; - - -(* --------------------------------------------------------------------------- -// function: LIFO.stackEntries( stack ) -// --------------------------------------------------------------------------- -// -// Returns the number of entries stored in stack , returns zero if -// NIL is passed in for . *) - -PROCEDURE stackEntries( VAR stack : Stack ) : StackSize; - -BEGIN - - (* bail out if stack is NIL *) - IF stack = NIL THEN - RETURN 0; - END; (* IF *) - - RETURN stack^.entryCount - -END stackEntries; - - -(* --------------------------------------------------------------------------- -// function: LIFO.dispose( stack ) -// --------------------------------------------------------------------------- -// -// Disposes of LIFO stack object . Returns NIL. *) - -PROCEDURE dispose ( VAR stack : Stack ) : Stack; - -VAR - thisEntry : ListPtr; - -BEGIN - - (* bail out if stack is NIL *) - IF stack = NIL THEN - RETURN NIL; - END; (* IF *) - - (* deallocate any entries in stack's overflow list *) - WHILE stack^.overflow # NIL DO - - (* isolate first entry in overflow list *) - thisEntry := stack^.overflow; - stack^.overflow := stack^.overflow^.next; - - (* deallocate the entry *) - DISPOSE(thisEntry); - - END; (* WHILE *) - - (* deallocate stack object and pass NIL to caller *) - DEALLOCATE(stack, TSIZE(Stack) + TSIZE(DataPtr) * (stack^.arraySize - 1)); - RETURN NIL - -END dispose; - - -END LIFO. 
diff --git a/tests/examplefiles/test.moo b/tests/examplefiles/test.moo deleted file mode 100644 index dec71ba8..00000000 --- a/tests/examplefiles/test.moo +++ /dev/null @@ -1,51 +0,0 @@ -you_lose_msg = "Either that person does not exist, or has a different password."; -if (!(caller in {#0, this})) - return E_PERM; - "...caller isn't :do_login_command..."; -elseif (args && (args[1] == "test")) - return this:test(@listdelete(args, 1)); -elseif (!(length(args) in {1, 2})) - notify(player, tostr("Usage: ", verb, " ")); -elseif (!valid(candidate = this:_match_player(name = strsub(args[1], " ", "_")))) - if (name == "guest") - "must be no guests"; - this:notify_lines(this:registration_text("guest")); - else - notify(player, you_lose_msg); - endif - "...unknown player..."; -elseif (is_clear_property(candidate, "password") || ((typeof(candidate.password) == STR) && ((length(candidate.password) < 2) || (crypt({@args, ""}[2], candidate.password) != candidate.password)))) - notify(player, you_lose_msg); - "...bad password..."; - server_log(tostr("FAILED CONNECT: ", args[1], " (", candidate, ") on ", connection_name(player), ($string_utils:connection_hostname(connection_name(player)) in candidate.all_connect_places) ? "" | "******")); -elseif (((candidate.name == "guest") && this.sitematch_guests) && valid(foreigner = $country_db:get_guest())) - notify(player, tostr("Okay,... Logging you in as `", foreigner:name(), "'")); - this:record_connection(foreigner); - return foreigner; -elseif ((parent(candidate) == $guest) && (!valid(candidate = candidate:defer()))) - if (candidate == #-3) - notify(player, "Sorry, guest characters are not allowed from your site right now."); - elseif (candidate == #-2) - this:notify_lines(this:registration_text("blacklisted", "Sorry, guest characters are not allowed from your site.")); - elseif (candidate == #-4) - this:notify_lines(this:registration_text("guest")); - else - notify(player, "Sorry, all of our guest characters are in use right now."); - endif -else - if ((!(name in candidate.aliases)) && (name != tostr(candidate))) - notify(player, tostr("Okay,... ", name, " is in use. Logging you in as `", candidate:name(), "'")); - endif - if (this:is_newted(candidate)) - notify(player, ""); - notify(player, this:newt_message_for(candidate)); - notify(player, ""); - else - this:record_connection(candidate); - if (verb[1] == "s") - candidate.use_do_command = 0; - endif - return candidate; - endif -endif -return 0; \ No newline at end of file diff --git a/tests/examplefiles/test.mt b/tests/examplefiles/test.mt deleted file mode 100644 index 008dc88e..00000000 --- a/tests/examplefiles/test.mt +++ /dev/null @@ -1,7 +0,0 @@ -exports (main) - -def main(=> currentProcess) :Int as DeepFrozen: - traceln(`Current process: $currentProcess`) - "A \r \n \x00 \u1234" - '\u1234' - return 0 diff --git a/tests/examplefiles/test.myt b/tests/examplefiles/test.myt deleted file mode 100644 index 1668f7a6..00000000 --- a/tests/examplefiles/test.myt +++ /dev/null @@ -1,166 +0,0 @@ -<%doc>formatting.myt - Provides section formatting elements, syntax-highlighted code blocks, and other special filters. - -<%global> - import string, re - import highlight - - -<%method section> -<%doc>Main section formatting element. -<%args> - toc - path - description=None - onepage=False - -<%init> - item = toc.get_by_path(path) - if item is None: - raise "path: " + path - - - - -
    - -<%python> - content = m.content() - re2 = re.compile(r"'''PYESC(.+?)PYESC'''", re.S) - content = re2.sub(lambda m: m.group(1), content) - - -% if item.depth > 1: -

    <% description or item.description %>

    -% - -
    - <% content %> -
-
-% if onepage or item.depth > 1:
-% if (item.next and item.next.depth >= item.depth):
- back to section top
-%
-% else:
- back to section top
- <& nav.myt:pagenav, item=item, onepage=onepage &>
-%
-
    - - - - -<%method formatplain> - <%filter> - import re - f = re.sub(r'\n[\s\t]*\n[\s\t]*', '

    \n

    ', f) - f = "

    " + f + "

    " - return f - -<% m.content() | h%> - - - - - -<%method codeline trim="both"> -<% m.content() %> - - -<%method code autoflush=False> -<%args> - title = None - syntaxtype = 'python' - html_escape = False - use_sliders = False - - -<%init> - def fix_indent(f): - f =string.expandtabs(f, 4) - g = '' - lines = string.split(f, "\n") - whitespace = None - for line in lines: - if whitespace is None: - match = re.match(r"^([ ]*).+", line) - if match is not None: - whitespace = match.group(1) - - if whitespace is not None: - line = re.sub(r"^%s" % whitespace, "", line) - - if whitespace is not None or re.search(r"\w", line) is not None: - g += (line + "\n") - - - return g.rstrip() - - p = re.compile(r'
    (.*?)
    ', re.S) - def hlight(match): - return "
    " + highlight.highlight(fix_indent(match.group(1)), html_escape = html_escape, syntaxtype = syntaxtype) + "
    " - content = p.sub(hlight, "
    " + m.content() + "
    ") - -
    "> -% if title is not None: -
    <% title %>
    -% -<% content %>
    - - - - - -<%method popboxlink trim="both"> - <%args> - name=None - show='show' - hide='hide' - - <%init> - if name is None: - name = m.attributes.setdefault('popbox_name', 0) - name += 1 - m.attributes['popbox_name'] = name - name = "popbox_" + repr(name) - -javascript:togglePopbox('<% name %>', '<% show %>', '<% hide %>') - - -<%method popbox trim="both"> -<%args> - name = None - class_ = None - -<%init> - if name is None: - name = 'popbox_' + repr(m.attributes['popbox_name']) - - - - -<%method poplink trim="both"> - <%args> - link='sql' - - <%init> - href = m.scomp('SELF:popboxlink') - - '''PYESC<& nav.myt:link, href=href, text=link, class_="codepoplink" &>PYESC''' - - -<%method codepopper trim="both"> - <%init> - c = m.content() - c = re.sub(r'\n', '
    \n', c.strip()) - -
    <&|SELF:popbox, class_="codepop" &><% c %>
    -
    -
    -<%method poppedcode trim="both">
    -	<%init>
    -		c = m.content()
    -		c = re.sub(r'\n', '
    \n', c.strip()) - -
    <% c %>
    -
diff --git a/tests/examplefiles/test.ncl b/tests/examplefiles/test.ncl
    deleted file mode 100644
    index f20f8159..00000000
    --- a/tests/examplefiles/test.ncl
    +++ /dev/null
    @@ -1,20 +0,0 @@
    -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
    -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
    -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
    -begin
    -    int_num = 1
    -    float_num = 0.1
    -    str = "A void map"
    -    array = (/1, 2, 3, 4, 5/)
    -
    -
    -    wks = gsn_open_wks("X11", "test_for_pygments")
    -
    -    res = True
    -    res@mpMinLonF = 90.
    -    res@mpMaxLonF = 180.
    -    res@mpMinLatF = 0.
    -    res@mpMaxLatF = 90.
    -
    -    plot = gsn_csm_map_ce(wks, res)
    -end
    \ No newline at end of file
diff --git a/tests/examplefiles/test.nim b/tests/examplefiles/test.nim
    deleted file mode 100644
    index 20610bb6..00000000
    --- a/tests/examplefiles/test.nim
    +++ /dev/null
    @@ -1,93 +0,0 @@
    -import re
    -
    -for x in lines("myfile.txt"):
    -  if x =~ re"(\w+)=(.*)":
    -    echo "Key: ", matches[0],
    -         " Value: ", matches[1]
    -
    -Echo("What's your name? ")
    -var name: string = readLine(stdin)
    -if name == "":
    -  echo("Poor soul, you lost your name?")
    -elif name == "name":
    -  echo("Very funny, your name is name.")
    -else:
    -  Echo("Hi, ", name, "!")
    -
    -var name = readLine(stdin)
    -case name
    -of "":
    -  echo("Poor soul, you lost your name?")
    -of "name":
    -  echo("Very funny, your name is name.")
    -else:
    -  Echo("Hi, ", name, "!")
    -
    -from strutils import parseInt
    -
    -Echo("A number please: ")
    -var n = parseInt(readLine(stdin))
    -case n
    -of 0..2, 4..7: Echo("The number is in the set: {0, 1, 2, 4, 5, 6, 7}")
    -of 3, 8: Echo("The number is 3 or 8")
    -
    -Echo("Counting to 10: ")
    -var i = 1
    -while i <= 10:
    -  Echo($i)
    -  inc(i)
    -
    -proc yes(question: string): bool =
    -  Echo(question, " (y/n)")
    -  while true:
    -    case readLine(stdin)
    -    of "y", "Y", "yes", "Yes": return true
    -    of "n", "N", "no", "No": return false
    -    else: Echo("Please be clear: yes or no")
    -
    -proc even(n: int): bool
    -
    -proc odd(n: int): bool =
    -  if n == 1: return true
    -  else: return even(n-1)
    -
    -iterator countup(a, b: int): int =
    -  var res = a
    -  while res <= b:
    -    yield res
    -    inc(res)
    -
    -type
    -  TPerson = object of TObject
    -    name*: string  # the * means that `name` is accessible from other modules
    -    age: int       # no * means that the field is hidden from other modules
    -
    -  TStudent = object of TPerson # TStudent inherits from TPerson
    -    id: int                    # with an id field
    -
    -var
    -  student: TStudent
    -  person: TPerson
    -assert(student is TStudent)
    -
    -echo({'a', 'b', 'c'}.card)
    -stdout.writeln("Hallo")
    -var
    -  f: TFile
    -if open(f, "numbers.txt"):
    -  try:
    -    var a = readLine(f)
    -    var b = readLine(f)
    -    echo("sum: " & $(parseInt(a) + parseInt(b)))
    -  except EOverflow:
    -    echo("overflow!")
    -  except EInvalidValue:
    -    echo("could not convert string to integer")
    -  except EIO:
    -    echo("IO error!")
    -  except:
    -    echo("Unknown exception!")
    -    # reraise the unknown exception:
    -    raise
    -  finally:
    -    close(f)
    \ No newline at end of file
diff --git a/tests/examplefiles/test.odin b/tests/examplefiles/test.odin
    deleted file mode 100644
    index 05b01d22..00000000
    --- a/tests/examplefiles/test.odin
    +++ /dev/null
    @@ -1,43 +0,0 @@
    ---
    --- Example of a fragment of an openEHR Archetype, written in the Object Data Instance Notation (ODIN)
    --- Definition available here: https://github.com/openEHR/odin
    --- Author: Thomas Beale
    ---
    -
    -    original_author = <
    -        ["name"] = <"Dr J Joyce">
    -        ["organisation"] = <"NT Health Service">
    -        ["date"] = <2003-08-03>
    -    >
    -    term_bindings = <
    -        ["umls"] = <
    -            ["id1"] =  -- apgar result
    -            ["id2"] =  -- 1-minute event 
    -        >
    -    >
    -    lifecycle_state =  <"initial">
    -    resource_package_uri =  <"http://www.aihw.org.au/data_sets/diabetic_archetypes.html">
    -
    -    details = <
    -        ["en"] = <
    -            language = <[iso_639-1::en]>
    -            purpose =  <"archetype for diabetic patient review">
    -            use = <"used for all hospital or clinic-based diabetic reviews, 
    -                including first time. Optional sections are removed according to the particular review"
    -            >
    -            misuse = <"not appropriate for pre-diagnosis use">
    -            original_resource_uri = <"http://www.healthdata.org.au/data_sets/diabetic_review_data_set_1.html">
    -        >
    -        ["de"] = <
    -            language = <[iso_639-1::de]>
    -            purpose =  <"Archetyp für die Untersuchung von Patienten mit Diabetes">
    -            use = <"wird benutzt für alle Diabetes-Untersuchungen im
    -                    Krankenhaus, inklusive der ersten Vorstellung. Optionale
    -                    Abschnitte werden in Abhängigkeit von der speziellen
    -                    Vorstellung entfernt."
    -            >
    -            misuse = <"nicht geeignet für Benutzung vor Diagnosestellung">
    -            original_resource_uri = <"http://www.healthdata.org.au/data_sets/diabetic_review_data_set_1.html">
    -        >
    -    >
    -	
diff --git a/tests/examplefiles/test.opa b/tests/examplefiles/test.opa
    deleted file mode 100644
    index ec287ac5..00000000
    --- a/tests/examplefiles/test.opa
    +++ /dev/null
    @@ -1,10 +0,0 @@
    -function sample_page() {
    -  
    -

    HTML in Opa

    -
    -
    -
    -

    Learning by examples.

    -
    -
    -} diff --git a/tests/examplefiles/test.orc b/tests/examplefiles/test.orc deleted file mode 100644 index d113303e..00000000 --- a/tests/examplefiles/test.orc +++ /dev/null @@ -1,81 +0,0 @@ -/* - * comment - */ -; comment -// comment - -instr/**/1,/**/N_a_M_e_,/**/+Name/**/// - iDuration = p3 - outc:a(aSignal) -endin - -opcode/**/aUDO,/**/i[],/**/aik// - aUDO -endop - -123 0123456789 -0xabcdef0123456789 0XABCDEF -1e2 3e+4 5e-6 7E8 9E+0 1E-2 3. 4.56 .789 - -"characters$MACRO." -"\\\a\b\n\r\t\012\345\67\"" - -{{ -characters$MACRO. -}} -{{\\\a\b\n\r\t\"\012\345\67}} - -+ - ~ ¬ ! * / ^ % << >> < > <= >= == != & # | && || ? : += -= *= /= - -0dbfs A4 kr ksmps nchnls nchnls_i sr - -do else elseif endif enduntil fi if ithen kthen od then until while -return rireturn - -aLabel: - label2: - -goto aLabel -reinit aLabel -cggoto 1==0, aLabel -timout 0, 0, aLabel -loop_ge 0, 0, 0, aLabel - -prints "%! %% %n%N %r%R %t%T \\a\\A \\b\\B \\n\\N \\r\\R \\t\\T" -prints Soutput - -readscore {{ -i 1 0 0 -}} -pyrun {{ -# Python -}} -lua_exec {{ --- Lua -}} - -#include/**/"file.udo" -#include/**/|file.udo| - -#ifdef MACRO -#else -#ifndef MACRO -#endif -#undef MACRO - -# define MACRO#macro_body# -#define/**/ -MACRO/**/ -#\#macro -body\## - -#define MACRO(ARG1#ARG2) #macro_body# -#define/**/ -MACRO(ARG1'ARG2'ARG3)/**/ -#\#macro -body\## - -$MACRO $MACRO. -$MACRO(x) -@0 -@@ 1 diff --git a/tests/examplefiles/test.p6 b/tests/examplefiles/test.p6 deleted file mode 100644 index 3d12b56c..00000000 --- a/tests/examplefiles/test.p6 +++ /dev/null @@ -1,252 +0,0 @@ -#!/usr/bin/env perl6 - -use v6; - -my $string = 'I look like a # comment!'; - -if $string eq 'foo' { - say 'hello'; -} - -regex http-verb { - 'GET' - | 'POST' - | 'PUT' - | 'DELETE' - | 'TRACE' - | 'OPTIONS' - | 'HEAD' -} - -# a sample comment - -say 'Hello from Perl 6!' - - -#`{ -multi-line comment! -} - -say 'here'; - -#`( -multi-line comment! -) - -say 'here'; - -#`{{{ -I'm a special comment! -}}} - -say 'there'; - -#`{{ -I'm { even } specialer! -}} - -say 'there'; - -#`{{ -does {{nesting}} work? -}} - -#`«< -trying mixed delimiters -» - -my $string = qq; -my $string = qq«Hooray, arbitrary delimiter!»; -my $string = q ; -my $string = qq<>; - -my %hash := Hash.new; - -=begin pod - -Here's some POD! Wooo - -=end pod - -=for Testing - This is POD (see? role isn't highlighted) - -say('this is not!'); - -=table - Of role things - -say('not in your table'); -#= A single line declarator "block" (with a keyword like role) -#| Another single line declarator "block" (with a keyword like role) -#={ - A declarator block (with a keyword like role) - } -#|{ - Another declarator block (with a keyword like role) - } -#= { A single line declarator "block" with a brace (with a keyword like role) -#=« - More declarator blocks! (with a keyword like role) - » -#|« - More declarator blocks! (with a keyword like role) - » - -say 'Moar code!'; - -my $don't = 16; - -sub don't($x) { - !$x -} - -say don't 'foo'; - -my %hash = ( - :foo(1), -); - -say %hash; -say %hash<>; -say %hash«foo»; - -say %*hash; -say %*hash<>; -say %*hash«foo»; - -say $; -say $; - -for (@A Z @B) -> $a, $b { - say $a + $b; -} - -Q:PIR { - .loadlib "somelib" -} - -my $longstring = q/ - lots - of - text -/; - -my $heredoc = q:to/END_SQL/; -SELECT * FROM Users -WHERE first_name = 'Rob' -END_SQL -my $hello; - -# Fun with regexen - -if 'food' ~~ /foo/ { - say 'match!' 
-} - -my $re = /foo/; -my $re2 = m/ foo /; -my $re3 = m:i/ FOO /; - -call-a-sub(/ foo /); -call-a-sub(/ foo \/ bar /); - -my $re4 = rx/something | something-else/; -my $result = ms/regexy stuff/; -my $sub0 = s/regexy stuff/more stuff/; -my $sub = ss/regexy stuff/more stuff/; -my $trans = tr/regexy stuff/more stuff/; - -my @values = ; -call-sub(); -call-sub ; - -my $result = $a < $b; - -for -> $letter { - say $letter; -} - -sub test-sub { - say @_; - say $!; - say $/; - say $0; - say $1; - say @*ARGS; - say $*ARGFILES; - say &?BLOCK; - say ::?CLASS; - say $?CLASS; - say @=COMMENT; - say %?CONFIG; - say $*CWD; - say $=data; - say %?DEEPMAGIC; - say $?DISTRO; - say $*DISTRO; - say $*EGID; - say %*ENV; - say $*ERR; - say $*EUID; - say $*EXECUTABLE_NAME; - say $?FILE; - say $?GRAMMAR; - say $*GID; - say $*IN; - say @*INC; - say %?LANG; - say $*LANG; - say $?LINE; - say %*META-ARGS; - say $?MODULE; - say %*OPTS; - say %*OPT; - say $?KERNEL; - say $*KERNEL; - say $*OUT; - say $?PACKAGE; - say $?PERL; - say $*PERL; - say $*PID; - say %=pod; - say $*PROGRAM_NAME; - say %*PROTOCOLS; - say ::?ROLE; - say $?ROLE; - say &?ROUTINE; - say $?SCOPE; - say $*TZ; - say $*UID; - say $?USAGE; - say $?VM; - say $?XVM; -} - -say ; - -my $perl5_re = m:P5/ fo{2} /; -my $re5 = rx«something | something-else»; - -my $M := %*COMPILING<%?OPTIONS>; - -say $M; - -sub regex-name { ... } -my $pair = role-name => 'foo'; -$pair = rolesque => 'foo'; - -my sub something(Str:D $value) { ... } - -my $s = q«< -some -string -stuff -»; - -my $regex = m«< some chars »; -# after - -say $/; - -roleq; diff --git a/tests/examplefiles/test.pan b/tests/examplefiles/test.pan deleted file mode 100644 index 56c8bd62..00000000 --- a/tests/examplefiles/test.pan +++ /dev/null @@ -1,54 +0,0 @@ -object template pantest; - -# Very simple pan test file -"/long/decimal" = 123; -"/long/octal" = 0755; -"/long/hexadecimal" = 0xFF; - -"/double/simple" = 0.01; -"/double/pi" = 3.14159; -"/double/exponent" = 1e-8; -"/double/scientific" = 1.3E10; - -"/string/single" = 'Faster, but escapes like \t, \n and \x3d don''t work, but '' should work.'; -"/string/double" = "Slower, but escapes like \t, \n and \x3d do work"; - -variable TEST = 2; - -"/x2" = to_string(TEST); -"/x2" ?= 'Default value'; - -"/x3" = 1 + 2 + value("/long/decimal"); - -"/x4" = undef; - -"/x5" = null; - -variable e ?= error("Test error message"); - -# include gmond config for services-monitoring -include { 'site/ganglia/gmond/services-monitoring' }; - -"/software/packages"=pkg_repl("httpd","2.2.3-43.sl5.3",PKG_ARCH_DEFAULT); -"/software/packages"=pkg_repl("php"); - -# Example function -function show_things_view_for_stuff = { - thing = ARGV[0]; - foreach( i; mything; STUFF ) { - if ( thing == mything ) { - return( true ); - } else { - return SELF; - }; - }; - false; -}; - -variable HERE = < '\' then - RootFolder := RootFolder + '\'; - ZeroMemory(@wfd, sizeof(wfd)); - wfd.dwFileAttributes := FILE_ATTRIBUTE_NORMAL; - if Recurse then - begin - hFindFile := FindFirstFile(pointer(RootFolder + '*.*'), wfd); - if hFindFile <> 0 then - try - repeat - if wfd.dwFileAttributes and FILE_ATTRIBUTE_DIRECTORY = FILE_ATTRIBUTE_DIRECTORY then - begin - if (string(wfd.cFileName) <> '.') and (string(wfd.cFileName) <> '..') then - begin - CountFolders(Handle, RootFolder + wfd.cFileName, Recurse); - end; - end; - until FindNextFile(hFindFile, wfd) = False; - Inc(CntFolders); - finally - Windows.FindClose(hFindFile); - end; - end; -end; - 
-//////////////////////////////////////////////////////////////////////////////// -// -// FindAllFiles -// -procedure FindAllFiles(Handle: THandle; RootFolder: string; Mask: string; Recurse: Boolean = True); -var - hFindFile : THandle; - wfd : TWin32FindData; -begin - if RootFolder[length(RootFolder)] <> '\' then - RootFolder := RootFolder + '\'; - ZeroMemory(@wfd, sizeof(wfd)); - wfd.dwFileAttributes := FILE_ATTRIBUTE_NORMAL; - if Recurse then - begin - hFindFile := FindFirstFile(pointer(RootFolder + '*.*'), wfd); - if hFindFile <> 0 then - try - repeat - if wfd.dwFileAttributes and FILE_ATTRIBUTE_DIRECTORY = FILE_ATTRIBUTE_DIRECTORY then - begin - if (string(wfd.cFileName) <> '.') and (string(wfd.cFileName) <> '..') then - begin - FindAllFiles(Handle, RootFolder + wfd.cFileName, Mask, Recurse); - end; - end; - until FindNextFile(hFindFile, wfd) = False; - Inc(NumFolder); - SendMessage(Handle, FFM_ONDIRFOUND, NumFolder, lParam(string(RootFolder))); - finally - Windows.FindClose(hFindFile); - end; - end; - hFindFile := FindFirstFile(pointer(RootFolder + Mask), wfd); - if hFindFile <> INVALID_HANDLE_VALUE then - try - repeat - if (wfd.dwFileAttributes and FILE_ATTRIBUTE_DIRECTORY <> FILE_ATTRIBUTE_DIRECTORY) then - begin - SendMessage(Handle, FFM_ONFILEFOUND, 0, lParam(string(RootFolder + wfd.cFileName))); - end; - until FindNextFile(hFindFile, wfd) = False; - finally - Windows.FindClose(hFindFile); - end; -end; - - -property test: boolean read ftest write ftest; -procedure test: boolean read ftest write ftest; - -// -// This sourcecode is part of omorphia -// - -Function IsValidHandle(Const Handle: THandle): Boolean; {$IFDEF OMORPHIA_FEATURES_USEASM} Assembler; -Asm - TEST EAX, EAX - JZ @@Finish - NOT EAX - TEST EAX, EAX - SETNZ AL - - {$IFDEF WINDOWS} - JZ @@Finish - - //Save the handle against modifications or loss - PUSH EAX - - //reserve some space for a later duplicate - PUSH EAX - - //Check if we are working on NT-Platform - CALL IsWindowsNTSystem - TEST EAX, EAX - JZ @@NoNTSystem - - PUSH DWORD PTR [ESP] - LEA EAX, DWORD PTR [ESP+$04] - PUSH EAX - CALL GetHandleInformation - TEST EAX, EAX - JNZ @@Finish2 - -@@NoNTSystem: - //Result := DuplicateHandle(GetCurrentProcess, Handle, GetCurrentProcess, - // @Duplicate, 0, False, DUPLICATE_SAME_ACCESS); - PUSH DUPLICATE_SAME_ACCESS - PUSH $00000000 - PUSH $00000000 - LEA EAX, DWORD PTR [ESP+$0C] - PUSH EAX - CALL GetCurrentProcess - PUSH EAX - PUSH DWORD PTR [ESP+$18] - PUSH EAX - CALL DuplicateHandle - - TEST EAX, EAX - JZ @@Finish2 - - // Result := CloseHandle(Duplicate); - PUSH DWORD PTR [ESP] - CALL CloseHandle - -@@Finish2: - POP EDX - POP EDX - - PUSH EAX - PUSH $00000000 - CALL SetLastError - POP EAX - {$ENDIF} - -@@Finish: -End; -{$ELSE} -Var - Duplicate: THandle; - Flags: DWORD; -Begin - If IsWinNT Then - Result := GetHandleInformation(Handle, Flags) - Else - Result := False; - If Not Result Then - Begin - // DuplicateHandle is used as an additional check for those object types not - // supported by GetHandleInformation (e.g. according to the documentation, - // GetHandleInformation doesn't support window stations and desktop although - // tests show that it does). GetHandleInformation is tried first because its - // much faster. Additionally GetHandleInformation is only supported on NT... 
- Result := DuplicateHandle(GetCurrentProcess, Handle, GetCurrentProcess, - @Duplicate, 0, False, DUPLICATE_SAME_ACCESS); - If Result Then - Result := CloseHandle(Duplicate); - End; -End; -{$ENDIF} - - - - -{*******************************************************} -{ } -{ Delphi Supplemental Components } -{ ZLIB Data Compression Interface Unit } -{ } -{ Copyright (c) 1997 Borland International } -{ } -{*******************************************************} - -{ Modified for zlib 1.1.3 by Davide Moretti Z_STREAM_END do - begin - P := OutBuf; - Inc(OutBytes, 256); - ReallocMem(OutBuf, OutBytes); - strm.next_out := PChar(Integer(OutBuf) + (Integer(strm.next_out) - Integer(P))); - strm.avail_out := 256; - end; - finally - CCheck(deflateEnd(strm)); - end; - ReallocMem(OutBuf, strm.total_out); - OutBytes := strm.total_out; - except - FreeMem(OutBuf); - raise - end; -end; - - -procedure DecompressBuf(const InBuf: Pointer; InBytes: Integer; - OutEstimate: Integer; out OutBuf: Pointer; out OutBytes: Integer); -var - strm: TZStreamRec; - P: Pointer; - BufInc: Integer; -begin - FillChar(strm, sizeof(strm), 0); - BufInc := (InBytes + 255) and not 255; - if OutEstimate = 0 then - OutBytes := BufInc - else - OutBytes := OutEstimate; - GetMem(OutBuf, OutBytes); - try - strm.next_in := InBuf; - strm.avail_in := InBytes; - strm.next_out := OutBuf; - strm.avail_out := OutBytes; - DCheck(inflateInit_(strm, zlib_version, sizeof(strm))); - try - while DCheck(inflate(strm, Z_FINISH)) <> Z_STREAM_END do - begin - P := OutBuf; - Inc(OutBytes, BufInc); - ReallocMem(OutBuf, OutBytes); - strm.next_out := PChar(Integer(OutBuf) + (Integer(strm.next_out) - Integer(P))); - strm.avail_out := BufInc; - end; - finally - DCheck(inflateEnd(strm)); - end; - ReallocMem(OutBuf, strm.total_out); - OutBytes := strm.total_out; - except - FreeMem(OutBuf); - raise - end; -end; - - -// TCustomZlibStream - -constructor TCustomZLibStream.Create(Strm: TStream); -begin - inherited Create; - FStrm := Strm; - FStrmPos := Strm.Position; -end; - -procedure TCustomZLibStream.Progress(Sender: TObject); -begin - if Assigned(FOnProgress) then FOnProgress(Sender); -end; - - -// TCompressionStream - -constructor TCompressionStream.Create(CompressionLevel: TCompressionLevel; - Dest: TStream); -const - Levels: array [TCompressionLevel] of ShortInt = - (Z_NO_COMPRESSION, Z_BEST_SPEED, Z_DEFAULT_COMPRESSION, Z_BEST_COMPRESSION); -begin - inherited Create(Dest); - FZRec.next_out := FBuffer; - FZRec.avail_out := sizeof(FBuffer); - CCheck(deflateInit_(FZRec, Levels[CompressionLevel], zlib_version, sizeof(FZRec))); -end; - -destructor TCompressionStream.Destroy; -begin - FZRec.next_in := nil; - FZRec.avail_in := 0; - try - if FStrm.Position <> FStrmPos then FStrm.Position := FStrmPos; - while (CCheck(deflate(FZRec, Z_FINISH)) <> Z_STREAM_END) - and (FZRec.avail_out = 0) do - begin - FStrm.WriteBuffer(FBuffer, sizeof(FBuffer)); - FZRec.next_out := FBuffer; - FZRec.avail_out := sizeof(FBuffer); - end; - if FZRec.avail_out < sizeof(FBuffer) then - FStrm.WriteBuffer(FBuffer, sizeof(FBuffer) - FZRec.avail_out); - finally - deflateEnd(FZRec); - end; - inherited Destroy; -end; - -function TCompressionStream.Read(var Buffer; Count: Longint): Longint; -begin - raise ECompressionError.Create('Invalid stream operation'); -end; - -function TCompressionStream.Write(const Buffer; Count: Longint): Longint; -begin - FZRec.next_in := @Buffer; - FZRec.avail_in := Count; - if FStrm.Position <> FStrmPos then FStrm.Position := FStrmPos; - while (FZRec.avail_in > 0) do - 
begin - CCheck(deflate(FZRec, 0)); - if FZRec.avail_out = 0 then - begin - FStrm.WriteBuffer(FBuffer, sizeof(FBuffer)); - FZRec.next_out := FBuffer; - FZRec.avail_out := sizeof(FBuffer); - FStrmPos := FStrm.Position; - Progress(Self); - end; - end; - Result := Count; -end; - -function TCompressionStream.Seek(Offset: Longint; Origin: Word): Longint; -begin - if (Offset = 0) and (Origin = soFromCurrent) then - Result := FZRec.total_in - else - raise ECompressionError.Create('Invalid stream operation'); -end; - -function TCompressionStream.GetCompressionRate: Single; -begin - if FZRec.total_in = 0 then - Result := 0 - else - Result := (1.0 - (FZRec.total_out / FZRec.total_in)) * 100.0; -end; - - -// TDecompressionStream - -constructor TDecompressionStream.Create(Source: TStream); -begin - inherited Create(Source); - FZRec.next_in := FBuffer; - FZRec.avail_in := 0; - DCheck(inflateInit_(FZRec, zlib_version, sizeof(FZRec))); -end; - -destructor TDecompressionStream.Destroy; -begin - inflateEnd(FZRec); - inherited Destroy; -end; - -function TDecompressionStream.Read(var Buffer; Count: Longint): Longint; -begin - FZRec.next_out := @Buffer; - FZRec.avail_out := Count; - if FStrm.Position <> FStrmPos then FStrm.Position := FStrmPos; - while (FZRec.avail_out > 0) do - begin - if FZRec.avail_in = 0 then - begin - FZRec.avail_in := FStrm.Read(FBuffer, sizeof(FBuffer)); - if FZRec.avail_in = 0 then - begin - Result := Count - FZRec.avail_out; - Exit; - end; - FZRec.next_in := FBuffer; - FStrmPos := FStrm.Position; - Progress(Self); - end; - DCheck(inflate(FZRec, 0)); - end; - Result := Count; -end; - -function TDecompressionStream.Write(const Buffer; Count: Longint): Longint; -begin - raise EDecompressionError.Create('Invalid stream operation'); -end; - -function TDecompressionStream.Seek(Offset: Longint; Origin: Word): Longint; -var - I: Integer; - Buf: array [0..4095] of Char; -begin - if (Offset = 0) and (Origin = soFromBeginning) then - begin - DCheck(inflateReset(FZRec)); - FZRec.next_in := FBuffer; - FZRec.avail_in := 0; - FStrm.Position := 0; - FStrmPos := 0; - end - else if ( (Offset >= 0) and (Origin = soFromCurrent)) or - ( ((Offset - FZRec.total_out) > 0) and (Origin = soFromBeginning)) then - begin - if Origin = soFromBeginning then Dec(Offset, FZRec.total_out); - if Offset > 0 then - begin - for I := 1 to Offset div sizeof(Buf) do - ReadBuffer(Buf, sizeof(Buf)); - ReadBuffer(Buf, Offset mod sizeof(Buf)); - end; - end - else - raise EDecompressionError.Create('Invalid stream operation'); - Result := FZRec.total_out; -end; - -end. diff --git a/tests/examplefiles/test.php b/tests/examplefiles/test.php deleted file mode 100644 index e8efdc6a..00000000 --- a/tests/examplefiles/test.php +++ /dev/null @@ -1,544 +0,0 @@ - - * @copyright Copyright (c) 2006, Manni - * @version 1.0 - * @link http://www.pkware.com/business_and_developers/developer/popups/appnote.txt - * @link http://mannithedark.is-a-geek.net/ - * @since 1.0 - * @package fnord.bb - * @subpackage archive - */ -class Zip extends Archive { - /** - * Outputs the zip file - * - * This function creates the zip file with the dirs and files given. - * If the optional parameter $file is given, the zip file is will be - * saved at that location. Otherwise the function returns the zip file's content. 
- * - * @access public - * - * @link http://www.pkware.com/business_and_developers/developer/popups/appnote.txt - * @param string $filename The path where the zip file will be saved - * - * @return bool|string Returns either true if the fil is sucessfully created or the content of the zip file - */ - function out($filename = false) { - // Empty output - $file_data = array(); // Data of the file part - $cd_data = array(); // Data of the central directory - - // Sort dirs and files by path length - uksort($this->dirs, 'sort_by_length'); - uksort($this->files, 'sort_by_length'); - - // Handle dirs - foreach($this->dirs as $dir) { - $dir .= '/'; - // File part - - // Reset dir data - $dir_data = ''; - - // Local file header - $dir_data .= "\x50\x4b\x03\x04"; // Local file header signature - $dir_data .= pack("v", 10); // Version needed to extract - $dir_data .= pack("v", 0); // General purpose bit flag - $dir_data .= pack("v", 0); // Compression method - $dir_data .= pack("v", 0); // Last mod file time - $dir_data .= pack("v", 0); // Last mod file date - $dir_data .= pack("V", 0); // crc-32 - $dir_data .= pack("V", 0); // Compressed size - $dir_data .= pack("V", 0); // Uncompressed size - $dir_data .= pack("v", strlen($dir)); // File name length - $dir_data .= pack("v", 0); // Extra field length - - $dir_data .= $dir; // File name - $dir_data .= ''; // Extra field (is empty) - - // File data - $dir_data .= ''; // Dirs have no file data - - // Data descriptor - $dir_data .= pack("V", 0); // crc-32 - $dir_data .= pack("V", 0); // Compressed size - $dir_data .= pack("V", 0); // Uncompressed size - - // Save current offset - $offset = strlen(implode('', $file_data)); - - // Append dir data to the file part - $file_data[] = $dir_data; - - // Central directory - - // Reset dir data - $dir_data = ''; - - // File header - $dir_data .= "\x50\x4b\x01\x02"; // Local file header signature - $dir_data .= pack("v", 0); // Version made by - $dir_data .= pack("v", 10); // Version needed to extract - $dir_data .= pack("v", 0); // General purpose bit flag - $dir_data .= pack("v", 0); // Compression method - $dir_data .= pack("v", 0); // Last mod file time - $dir_data .= pack("v", 0); // Last mod file date - $dir_data .= pack("V", 0); // crc-32 - $dir_data .= pack("V", 0); // Compressed size - $dir_data .= pack("V", 0); // Uncompressed size - $dir_data .= pack("v", strlen($dir)); // File name length - $dir_data .= pack("v", 0); // Extra field length - $dir_data .= pack("v", 0); // File comment length - $dir_data .= pack("v", 0); // Disk number start - $dir_data .= pack("v", 0); // Internal file attributes - $dir_data .= pack("V", 16); // External file attributes - $dir_data .= pack("V", $offset); // Relative offset of local header - - $dir_data .= $dir; // File name - $dir_data .= ''; // Extra field (is empty) - $dir_data .= ''; // File comment (is empty) - - /* - // Data descriptor - $dir_data .= pack("V", 0); // crc-32 - $dir_data .= pack("V", 0); // Compressed size - $dir_data .= pack("V", 0); // Uncompressed size - */ - - // Append dir data to the central directory data - $cd_data[] = $dir_data; - } - - // Handle files - foreach($this->files as $name => $file) { - // Get values - $content = $file[0]; - - // File part - - // Reset file data - $fd = ''; - - // Detect possible compressions - // Use deflate - if(function_exists('gzdeflate')) { - $method = 8; - - // Compress file content - $compressed_data = gzdeflate($content); - - // Use bzip2 - } elseif(function_exists('bzcompress')) { - $method = 12; - - // 
Compress file content - $compressed_data = bzcompress($content); - - // No compression - } else { - $method = 0; - - // Do not compress the content :P - $compressed_data = $content; - } - - // Local file header - $fd .= "\x50\x4b\x03\x04"; // Local file header signature - $fd .= pack("v", 20); // Version needed to extract - $fd .= pack("v", 0); // General purpose bit flag - $fd .= pack("v", $method); // Compression method - $fd .= pack("v", 0); // Last mod file time - $fd .= pack("v", 0); // Last mod file date - $fd .= pack("V", crc32($content)); // crc-32 - $fd .= pack("V", strlen($compressed_data)); // Compressed size - $fd .= pack("V", strlen($content)); // Uncompressed size - $fd .= pack("v", strlen($name)); // File name length - $fd .= pack("v", 0); // Extra field length - - $fd .= $name; // File name - $fd .= ''; // Extra field (is empty) - - // File data - $fd .= $compressed_data; - - // Data descriptor - $fd .= pack("V", crc32($content)); // crc-32 - $fd .= pack("V", strlen($compressed_data)); // Compressed size - $fd .= pack("V", strlen($content)); // Uncompressed size - - // Save current offset - $offset = strlen(implode('', $file_data)); - - // Append file data to the file part - $file_data[] = $fd; - - // Central directory - - // Reset file data - $fd = ''; - - // File header - $fd .= "\x50\x4b\x01\x02"; // Local file header signature - $fd .= pack("v", 0); // Version made by - $fd .= pack("v", 20); // Version needed to extract - $fd .= pack("v", 0); // General purpose bit flag - $fd .= pack("v", $method); // Compression method - $fd .= pack("v", 0); // Last mod file time - $fd .= pack("v", 0); // Last mod file date - $fd .= pack("V", crc32($content)); // crc-32 - $fd .= pack("V", strlen($compressed_data)); // Compressed size - $fd .= pack("V", strlen($content)); // Uncompressed size - $fd .= pack("v", strlen($name)); // File name length - $fd .= pack("v", 0); // Extra field length - $fd .= pack("v", 0); // File comment length - $fd .= pack("v", 0); // Disk number start - $fd .= pack("v", 0); // Internal file attributes - $fd .= pack("V", 32); // External file attributes - $fd .= pack("V", $offset); // Relative offset of local header - - $fd .= $name; // File name - $fd .= ''; // Extra field (is empty) - $fd .= ''; // File comment (is empty) - - /* - // Data descriptor - $fd .= pack("V", crc32($content)); // crc-32 - $fd .= pack("V", strlen($compressed_data)); // Compressed size - $fd .= pack("V", strlen($content)); // Uncompressed size - */ - - // Append file data to the central directory data - $cd_data[] = $fd; - } - - // Digital signature - $digital_signature = ''; - $digital_signature .= "\x50\x4b\x05\x05"; // Header signature - $digital_signature .= pack("v", 0); // Size of data - $digital_signature .= ''; // Signature data (is empty) - - $tmp_file_data = implode('', $file_data); // File data - $tmp_cd_data = implode('', $cd_data). 
// Central directory - $digital_signature; // Digital signature - - // End of central directory - $eof_cd = ''; - $eof_cd .= "\x50\x4b\x05\x06"; // End of central dir signature - $eof_cd .= pack("v", 0); // Number of this disk - $eof_cd .= pack("v", 0); // Number of the disk with the start of the central directory - $eof_cd .= pack("v", count($cd_data)); // Total number of entries in the central directory on this disk - $eof_cd .= pack("v", count($cd_data)); // Total number of entries in the central directory - $eof_cd .= pack("V", strlen($tmp_cd_data)); // Size of the central directory - $eof_cd .= pack("V", strlen($tmp_file_data)); // Offset of start of central directory with respect to the starting disk number - $eof_cd .= pack("v", 0); // .ZIP file comment length - $eof_cd .= ''; // .ZIP file comment (is empty) - - // Content of the zip file - $data = $tmp_file_data. - // $extra_data_record. - $tmp_cd_data. - $eof_cd; - - // Return content? - if(!$filename) - return $data; - - // Write to file - return file_put_contents($filename, $data); - } - - /** - * Load a zip file - * - * This function loads the files and dirs from a zip file from the harddrive. - * - * @access public - * - * @param string $file The path to the zip file - * @param bool $reset Reset the files and dirs before adding the zip file's content? - * - * @return bool Returns true if the file was loaded sucessfully - */ - function load_file($file, $reset = true) { - // Check whether the file exists - if(!file_exists($file)) - return false; - - // Load the files content - $content = @file_get_contents($file); - - // Return false if the file cannot be opened - if(!$content) - return false; - - // Read the zip - return $this->load_string($content, $reset); - } - - /** - * Load a zip string - * - * This function loads the files and dirs from a string - * - * @access public - * - * @param string $string The string the zip is generated from - * @param bool $reset Reset the files and dirs before adding the zip file's content? - * - * @return bool Returns true if the string was loaded sucessfully - */ - function load_string($string, $reset = true) { - // Reset the zip? - if($reset) { - $this->dirs = array(); - $this->files = array(); - } - - // Get the starting position of the end of central directory record - $start = strpos($string, "\x50\x4b\x05\x06"); - - // Error - if($start === false) - die('Could not find the end of central directory record'); - - // Get the ecdr - $eof_cd = substr($string, $start+4, 18); - - // Unpack the ecdr infos - $eof_cd = unpack('vdisc1/'. - 'vdisc2/'. - 'ventries1/'. - 'ventries2/'. - 'Vsize/'. - 'Voffset/'. 
- 'vcomment_lenght', $eof_cd); - - // Do not allow multi disc zips - if($eof_cd['disc1'] != 0) - die('multi disk stuff is not yet implemented :/'); - - // Save the interesting values - $cd_entries = $eof_cd['entries1']; - $cd_size = $eof_cd['size']; - $cd_offset = $eof_cd['offset']; - - // Get the central directory record - $cdr = substr($string, $cd_offset, $cd_size); - - // Reset the position and the list of the entries - $pos = 0; - $entries = array(); - - // Handle cdr - while($pos < strlen($cdr)) { - // Check header signature - // Digital signature - if(substr($cdr, $pos, 4) == "\x50\x4b\x05\x05") { - // Get digital signature size - $tmp_info = unpack('vsize', substr($cdr, $pos + 4, 2)); - - // Read out the digital signature - $digital_sig = substr($header, $pos + 6, $tmp_info['size']); - - break; - } - - // Get file header - $header = substr($cdr, $pos, 46); - - // Unpack the header information - $header_info = @unpack('Vheader/'. - 'vversion_made_by/'. - 'vversion_needed/'. - 'vgeneral_purpose/'. - 'vcompression_method/'. - 'vlast_mod_time/'. - 'vlast_mod_date/'. - 'Vcrc32/'. - 'Vcompressed_size/'. - 'Vuncompressed_size/'. - 'vname_length/'. - 'vextra_length/'. - 'vcomment_length/'. - 'vdisk_number/'. - 'vinternal_attributes/'. - 'Vexternal_attributes/'. - 'Voffset', - $header); - - // Valid header? - if($header_info['header'] != 33639248) - return false; - - // New position - $pos += 46; - - // Read out the file name - $header_info['name'] = substr($cdr, $pos, $header_info['name_length']); - - // New position - $pos += $header_info['name_length']; - - // Read out the extra stuff - $header_info['extra'] = substr($cdr, $pos, $header_info['extra_length']); - - // New position - $pos += $header_info['extra_length']; - - // Read out the comment - $header_info['comment'] = substr($cdr, $pos, $header_info['comment_length']); - - // New position - $pos += $header_info['comment_length']; - - // Append this file/dir to the entry list - $entries[] = $header_info; - } - - // Check whether all entries where read sucessfully - if(count($entries) != $cd_entries) - return false; - - // Handle files/dirs - foreach($entries as $entry) { - // Is a dir? - if($entry['external_attributes'] & 16) { - $this->add_dir($entry['name']); - continue; - } - - // Get local file header - $header = substr($string, $entry['offset'], 30); - - // Unpack the header information - $header_info = @unpack('Vheader/'. - 'vversion_needed/'. - 'vgeneral_purpose/'. - 'vcompression_method/'. - 'vlast_mod_time/'. - 'vlast_mod_date/'. - 'Vcrc32/'. - 'Vcompressed_size/'. - 'Vuncompressed_size/'. - 'vname_length/'. - 'vextra_length', - $header); - - // Valid header? 
- if($header_info['header'] != 67324752) - return false; - - // Get content start position - $start = $entry['offset'] + 30 + $header_info['name_length'] + $header_info['extra_length']; - - // Get the compressed data - $data = substr($string, $start, $header_info['compressed_size']); - - // Detect compression type - switch($header_info['compression_method']) { - // No compression - case 0: - // Ne decompression needed - $content = $data; - break; - - // Gzip - case 8: - if(!function_exists('gzinflate')) - return false; - - // Uncompress data - $content = gzinflate($data); - break; - - // Bzip2 - case 12: - if(!function_exists('bzdecompress')) - return false; - - // Decompress data - $content = bzdecompress($data); - break; - - // Compression not supported -> error - default: - return false; - } - - // Try to add file - if(!$this->add_file($entry['name'], $content)) - return false; - } - - return true; - } -} - -function &byref() { - $x = array(); - return $x; -} - -// Test highlighting of magic methods and variables -class MagicClass { - public $magic_str; - public $ordinary_str; - - public function __construct($some_var) { - $this->magic_str = __FILE__; - $this->ordinary_str = $some_var; - } - - public function __toString() { - return $this->magic_str; - } - - public function nonMagic() { - return $this->ordinary_str; - } -} - -$magic = new MagicClass(__DIR__); -__toString(); -$magic->nonMagic(); -$magic->__toString(); - - echo << - diff --git a/tests/examplefiles/test.pig b/tests/examplefiles/test.pig deleted file mode 100644 index f67b0268..00000000 --- a/tests/examplefiles/test.pig +++ /dev/null @@ -1,148 +0,0 @@ -/** - * This script is an example recommender (using made up data) showing how you might modify item-item links - * by defining similar relations between items in a dataset and customizing the change in weighting. - * This example creates metadata by using the genre field as the metadata_field. The items with - * the same genre have it's weight cut in half in order to boost the signals of movies that do not have the same genre. - * This technique requires a customization of the standard GetItemItemRecommendations macro - */ -import 'recommenders.pig'; - - - -%default INPUT_PATH_PURCHASES '../data/retail/purchases.json' -%default INPUT_PATH_WISHLIST '../data/retail/wishlists.json' -%default INPUT_PATH_INVENTORY '../data/retail/inventory.json' -%default OUTPUT_PATH '../data/retail/out/modify_item_item' - - -/******** Custom GetItemItemRecommnedations *********/ -define recsys__GetItemItemRecommendations_ModifyCustom(user_item_signals, metadata) returns item_item_recs { - - -- Convert user_item_signals to an item_item_graph - ii_links_raw, item_weights = recsys__BuildItemItemGraph( - $user_item_signals, - $LOGISTIC_PARAM, - $MIN_LINK_WEIGHT, - $MAX_LINKS_PER_USER - ); - -- NOTE this function is added in order to combine metadata with item-item links - -- See macro for more detailed explination - ii_links_metadata = recsys__AddMetadataToItemItemLinks( - ii_links_raw, - $metadata - ); - - /********* Custom Code starts here ********/ - - --The code here should adjust the weights based on an item-item link and the equality of metadata. - -- In this case, if the metadata is the same, the weight is reduced. Otherwise the weight is left alone. 
- ii_links_adjusted = foreach ii_links_metadata generate item_A, item_B,
- -- the amount of weight adjusted is dependent on the domain of data and what is expected
- -- It is always best to adjust the weight by multiplying it by a factor rather than by adding a constant
- (metadata_B == metadata_A ? (weight * 0.5): weight) as weight;
-
-
- /******** Custom Code stops here *********/
-
- -- remove negative numbers just in case
- ii_links_adjusted_filt = foreach ii_links_adjusted generate item_A, item_B,
- (weight <= 0 ? 0: weight) as weight;
- -- Adjust the weights of the graph to improve recommendations.
- ii_links = recsys__AdjustItemItemGraphWeight(
- ii_links_adjusted_filt,
- item_weights,
- $BAYESIAN_PRIOR
- );
-
- -- Use the item-item graph to create item-item recommendations.
- $item_item_recs = recsys__BuildItemItemRecommendationsFromGraph(
- ii_links,
- $NUM_RECS_PER_ITEM,
- $NUM_RECS_PER_ITEM
- );
-};
-
-
-/******* Load Data **********/
-
---Get purchase signals
-purchase_input = load '$INPUT_PATH_PURCHASES' using org.apache.pig.piggybank.storage.JsonLoader(
- 'row_id: int,
- movie_id: chararray,
- movie_name: chararray,
- user_id: chararray,
- purchase_price: int');
-
---Get wishlist signals
-wishlist_input = load '$INPUT_PATH_WISHLIST' using org.apache.pig.piggybank.storage.JsonLoader(
- 'row_id: int,
- movie_id: chararray,
- movie_name: chararray,
- user_id: chararray');
-
-
-/******* Convert Data to Signals **********/
-
--- Start with choosing 1 as max weight for a signal.
-purchase_signals = foreach purchase_input generate
- user_id as user,
- movie_name as item,
- 1.0 as weight;
-
-
--- Start with choosing 0.5 as weight for wishlist items because that is a weaker signal than
--- purchasing an item.
-wishlist_signals = foreach wishlist_input generate
- user_id as user,
- movie_name as item,
- 0.5 as weight;
-
-user_signals = union purchase_signals, wishlist_signals;
-
-
-/******** Changes for Modifying item-item links ******/
-inventory_input = load '$INPUT_PATH_INVENTORY' using org.apache.pig.piggybank.storage.JsonLoader(
- 'movie_title: chararray,
- genres: bag{tuple(content:chararray)}');
-
-
-metadata = foreach inventory_input generate
- FLATTEN(genres) as metadata_field,
- movie_title as item;
--- requires the macro to be written separately
- --NOTE this macro is defined within this file for clarity
-item_item_recs = recsys__GetItemItemRecommendations_ModifyCustom(user_signals, metadata);
-/******* No more changes ********/
-
-
-user_item_recs = recsys__GetUserItemRecommendations(user_signals, item_item_recs);
-
---Completely unrelated code stuck in the middle
-data = LOAD 's3n://my-s3-bucket/path/to/responses'
- USING org.apache.pig.piggybank.storage.JsonLoader();
-responses = FOREACH data GENERATE object#'response' AS response: map[];
-out = FOREACH responses
- GENERATE response#'id' AS id: int, response#'thread' AS thread: chararray,
- response#'comments' AS comments: {t: (comment: chararray)};
-STORE out INTO 's3n://path/to/output' USING PigStorage('|');
-
-
-/******* Store recommendations **********/
-
--- If your output folder exists already, hadoop will refuse to write data to it.
- -rmf $OUTPUT_PATH/item_item_recs; -rmf $OUTPUT_PATH/user_item_recs; - -store item_item_recs into '$OUTPUT_PATH/item_item_recs' using PigStorage(); -store user_item_recs into '$OUTPUT_PATH/user_item_recs' using PigStorage(); - --- STORE the item_item_recs into dynamo -STORE item_item_recs - INTO '$OUTPUT_PATH/unused-ii-table-data' -USING com.mortardata.pig.storage.DynamoDBStorage('$II_TABLE', '$AWS_ACCESS_KEY_ID', '$AWS_SECRET_ACCESS_KEY'); - --- STORE the user_item_recs into dynamo -STORE user_item_recs - INTO '$OUTPUT_PATH/unused-ui-table-data' -USING com.mortardata.pig.storage.DynamoDBStorage('$UI_TABLE', '$AWS_ACCESS_KEY_ID', '$AWS_SECRET_ACCESS_KEY'); diff --git a/tests/examplefiles/test.plot b/tests/examplefiles/test.plot deleted file mode 100644 index cef0f908..00000000 --- a/tests/examplefiles/test.plot +++ /dev/null @@ -1,333 +0,0 @@ -# -# $Id: prob2.dem,v 1.9 2006/06/14 03:24:09 sfeam Exp $ -# -# Demo Statistical Approximations version 1.1 -# -# Copyright (c) 1991, Jos van der Woude, jvdwoude@hut.nl - -# History: -# -- --- 1991 Jos van der Woude: 1st version -# 06 Jun 2006 Dan Sebald: Added plot methods for better visual effect. - -print "" -print "" -print "" -print "" -print "" -print "" -print " Statistical Approximations, version 1.1" -print "" -print " Copyright (c) 1991, 1992, Jos van de Woude, jvdwoude@hut.nl" -print "" -print "" -print "" -print "" -print "" -print "" -print "" -print "" -print "" -print "" -print "" -print " NOTE: contains 10 plots and consequently takes some time to run" -print " Press Ctrl-C to exit right now" -print "" -pause -1 " Press Return to start demo ..." - -load "stat.inc" -rnd(x) = floor(x+0.5) -r_xmin = -1 -r_sigma = 4.0 - -# Binomial PDF using normal approximation -n = 25; p = 0.15 -mu = n * p -sigma = sqrt(n * p * (1.0 - p)) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * binom(floor((n+1)*p), n, p) #mode of binomial PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "binomial PDF using normal approximation" -set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot binom(rnd(x), n, p) with histeps, normal(x, mu, sigma) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Binomial PDF using poisson approximation -n = 50; p = 0.1 -mu = n * p -sigma = sqrt(mu) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? 
r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * binom(floor((n+1)*p), n, p) #mode of binomial PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample (xmax - xmin + 3) -set title "binomial PDF using poisson approximation" -set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot binom(x, n, p) with histeps, poisson(x, mu) with histeps -pause -1 "Hit return to continue" -unset arrow -unset label - -# Geometric PDF using gamma approximation -p = 0.3 -mu = (1.0 - p) / p -sigma = sqrt(mu / p) -lambda = p -rho = 1.0 - p -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * p -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "geometric PDF using gamma approximation" -set arrow from mu, 0 to mu, gmm(mu, rho, lambda) nohead -set arrow from mu, gmm(mu + sigma, rho, lambda) \ - to mu + sigma, gmm(mu + sigma, rho, lambda) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, gmm(mu + sigma, rho, lambda) -plot geometric(rnd(x),p) with histeps, gmm(x, rho, lambda) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Geometric PDF using normal approximation -p = 0.3 -mu = (1.0 - p) / p -sigma = sqrt(mu / p) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * p -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "geometric PDF using normal approximation" -set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot geometric(rnd(x),p) with histeps, normal(x, mu, sigma) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Hypergeometric PDF using binomial approximation -nn = 75; mm = 25; n = 10 -p = real(mm) / nn -mu = n * p -sigma = sqrt(real(nn - n) / (nn - 1.0) * n * p * (1.0 - p)) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? 
r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * hypgeo(floor(mu), nn, mm, n) #mode of binom PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample (xmax - xmin + 3) -set title "hypergeometric PDF using binomial approximation" -set arrow from mu, 0 to mu, binom(floor(mu), n, p) nohead -set arrow from mu, binom(floor(mu + sigma), n, p) \ - to mu + sigma, binom(floor(mu + sigma), n, p) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, binom(floor(mu + sigma), n, p) -plot hypgeo(x, nn, mm, n) with histeps, binom(x, n, p) with histeps -pause -1 "Hit return to continue" -unset arrow -unset label - -# Hypergeometric PDF using normal approximation -nn = 75; mm = 25; n = 10 -p = real(mm) / nn -mu = n * p -sigma = sqrt(real(nn - n) / (nn - 1.0) * n * p * (1.0 - p)) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * hypgeo(floor(mu), nn, mm, n) #mode of binom PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "hypergeometric PDF using normal approximation" -set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot hypgeo(rnd(x), nn, mm, n) with histeps, normal(x, mu, sigma) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Negative binomial PDF using gamma approximation -r = 8; p = 0.6 -mu = r * (1.0 - p) / p -sigma = sqrt(mu / p) -lambda = p -rho = r * (1.0 - p) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * gmm((rho - 1) / lambda, rho, lambda) #mode of gamma PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "negative binomial PDF using gamma approximation" -set arrow from mu, 0 to mu, gmm(mu, rho, lambda) nohead -set arrow from mu, gmm(mu + sigma, rho, lambda) \ - to mu + sigma, gmm(mu + sigma, rho, lambda) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, gmm(mu + sigma, rho, lambda) -plot negbin(rnd(x), r, p) with histeps, gmm(x, rho, lambda) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Negative binomial PDF using normal approximation -r = 8; p = 0.4 -mu = r * (1.0 - p) / p -sigma = sqrt(mu / p) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? 
r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * negbin(floor((r-1)*(1-p)/p), r, p) #mode of gamma PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "negative binomial PDF using normal approximation" -set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot negbin(rnd(x), r, p) with histeps, normal(x, mu, sigma) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Normal PDF using logistic approximation -mu = 1.0; sigma = 1.5 -a = mu -lambda = pi / (sqrt(3.0) * sigma) -xmin = mu - r_sigma * sigma -xmax = mu + r_sigma * sigma -ymax = 1.1 * logistic(mu, a, lambda) #mode of logistic PDF used -set key box -unset zeroaxis -set xrange [xmin: xmax] -set yrange [0 : ymax] -set xlabel "x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%.1f" -set format y "%.2f" -set sample 200 -set title "normal PDF using logistic approximation" -set arrow from mu,0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot logistic(x, a, lambda), normal(x, mu, sigma) -pause -1 "Hit return to continue" -unset arrow -unset label - -# Poisson PDF using normal approximation -mu = 5.0 -sigma = sqrt(mu) -xmin = floor(mu - r_sigma * sigma) -xmin = xmin < r_xmin ? r_xmin : xmin -xmax = ceil(mu + r_sigma * sigma) -ymax = 1.1 * poisson(mu, mu) #mode of poisson PDF used -set key box -unset zeroaxis -set xrange [xmin - 1 : xmax + 1] -set yrange [0 : ymax] -set xlabel "k, x ->" -set ylabel "probability density ->" -set ytics 0, ymax / 10.0, ymax -set format x "%2.0f" -set format y "%3.2f" -set sample 200 -set title "poisson PDF using normal approximation" -set arrow from mu, 0 to mu, normal(mu, mu, sigma) nohead -set arrow from mu, normal(mu + sigma, mu, sigma) \ - to mu + sigma, normal(mu + sigma, mu, sigma) nohead -set label "mu" at mu + 0.5, ymax / 10 -set label "sigma" at mu + 0.5 + sigma, normal(mu + sigma, mu, sigma) -plot poisson(rnd(x), mu) with histeps, normal(x, mu, sigma) -pause -1 "Hit return to continue" -reset diff --git a/tests/examplefiles/test.ps1 b/tests/examplefiles/test.ps1 deleted file mode 100644 index 385fb6f4..00000000 --- a/tests/examplefiles/test.ps1 +++ /dev/null @@ -1,108 +0,0 @@ -<# -.SYNOPSIS -Runs a T-SQL Query and optional outputs results to a delimited file. -.DESCRIPTION -Invoke-Sql script will run a T-SQL query or stored procedure and optionally outputs a delimited file. 
-.EXAMPLE -PowerShell.exe -File "C:\Scripts\Invoke-Sql.ps1" -ServerInstance "Z003\sqlprod2" -Database orders -Query "EXEC usp_accounts '12445678'" -This example connects to Z003\sqlprod2.Orders and executes a stored procedure which does not return a result set -.EXAMPLE -PowerShell.exe -File "C:\Scripts\Invoke-Sql.ps1" -ServerInstance "Z003\sqlprod2" -Database orders -Query "SELECT * FROM dbo.accounts" -FilePath "C:\Scripts\accounts.txt" -Delimiter "," -This example connects to Z003\sqlprod2.Orders and selects the records from the accounts tables, the data is outputed to a CSV file -.NOTES -Version History -v1.0 - Chad Miller - 12/14/2010 - Initial release -IMPORTANT!!! The EventLog source which is set to the application needs to be registered with -the Event log: -New-EventLog -LogName Application -Source $Application -#> -param( -#ServerInstance is Mandatory! -[Parameter(Position=0, Mandatory=$false)] [string]$ServerInstance, -#Database is Mandatory! -[Parameter(Position=1, Mandatory=$false)] [string]$Database, -#Query is Mandatory! -[Parameter(Position=2, Mandatory=$false)] [string]$Query, -[Parameter(Position=3, Mandatory=$false)] [string]$Application="Invoke-Sql.ps1", -[Parameter(Position=4, Mandatory=$false)] [string]$FilePath, -[Parameter(Position=7, Mandatory=$false)] [string]$Delimiter="|", -#If UserName isn't supplied a trusted connection will be used -[Parameter(Position=5, Mandatory=$false)] [string]$UserName, -[Parameter(Position=6, Mandatory=$false)] [string]$Password, -[Parameter(Position=8, Mandatory=$false)] [Int32]$QueryTimeout=600, -[Parameter(Position=9, Mandatory=$false)] [Int32]$ConnectionTimeout=15 -) - - -#This must be run as administrator on Windows 2008 and higher! -New-EventLog -LogName Application -Source $Application -EA SilentlyContinue -$Error.Clear() - -####################### -function Invoke-SqlCmd2 -{ - param( - [Parameter(Position=0, Mandatory=$true)] [string]$ServerInstance, - [Parameter(Position=1, Mandatory=$true)] [string]$Database, - [Parameter(Position=2, Mandatory=$true)] [string]$Query, - [Parameter(Position=3, Mandatory=$false)] [string]$UserName, - [Parameter(Position=4, Mandatory=$false)] [string]$Password, - [Parameter(Position=5, Mandatory=$false)] [Int32]$QueryTimeout, - [Parameter(Position=6, Mandatory=$false)] [Int32]$ConnectionTimeout - ) - - try { - if ($Username) - { $ConnectionString = "Server={0};Database={1};User ID={2};Password={3};Trusted_Connection=False;Connect Timeout={4}" -f $ServerInstance,$Database,$Username,$Password,$ConnectionTimeout } - else - { $ConnectionString = "Server={0};Database={1};Integrated Security=True;Connect Timeout={2}" -f $ServerInstance,$Database,$ConnectionTimeout } - $conn=new-object System.Data.SqlClient.SQLConnection - $conn.ConnectionString=$ConnectionString - $conn.Open() - $cmd=new-object system.Data.SqlClient.SqlCommand($Query,$conn) - $cmd.CommandTimeout=$QueryTimeout - $ds=New-Object system.Data.DataSet - $da=New-Object system.Data.SqlClient.SqlDataAdapter($cmd) - [void]$da.fill($ds) - Write-Output ($ds.Tables[0]) - } - finally { - $conn.Dispose() - } - -} #Invoke-SqlCmd2 - -####################### -# MAIN # -####################### -if ($PSBoundParameters.Count -eq 0) -{ - get-help $myInvocation.MyCommand.Path -full - break -} - -try { - $msg = $null - $msg += "Application/Job Name: $Application`n" - $msg += "Query: $Query`n" - $msg += "ServerInstance: $ServerInstance`n" - $msg += "Database: $Database`n" - $msg += "FilePath: $FilePath`n" - - Write-EventLog -LogName Application -Source 
"$Application" -EntryType Information -EventId 12345 -Message "Starting`n$msg" - $dt = Invoke-SqlCmd2 -ServerInstance $ServerInstance -Database $Database -Query $Query -UserName $UserName -Password $Password -QueryTimeOut $QueryTimeOut -ConnectionTimeout $ConnectionTimeout - if ($FilePath) - { - if ($dt) - { $dt | export-csv -Delimiter $Delimiter -Path $FilePath -NoTypeInformation } - else #Query Returned No Output! - {Write-EventLog -LogName Application -Source "$Application" -EntryType Warning -EventId 12345 -Message "NoOutput`n$msg" } - } - - Write-EventLog -LogName Application -Source "$Application" -EntryType Information -EventId 12345 -Message "Completed`n$msg" -} -catch { - $Exception = "{0}, {1}" -f $_.Exception.GetType().FullName,$( $_.Exception.Message -replace "'" ) - Write-EventLog -LogName Application -Source "$Application" -EntryType Error -EventId 12345 -Message "Error`n$msg`n$Exception" - throw -} diff --git a/tests/examplefiles/test.psl b/tests/examplefiles/test.psl deleted file mode 100644 index 3ac99498..00000000 --- a/tests/examplefiles/test.psl +++ /dev/null @@ -1,182 +0,0 @@ -// This is a comment - -// 1. Basics - -// Functions -func Add(X : Univ_Integer; Y : Univ_Integer) -> Univ_Integer is - return X + Y; -end func Add; -// End of line semi-colons are optional -// +, +=, -, -=, *, *=, /, /= -// all do what you'd expect (/ is integer division) - -// If you find Univ_Integer to be too verbose you can import Short_Names -// which defines aliases like Int for Univ_Integer and String for Univ_String -import PSL::Short_Names::*, * - -func Greetings() is - const S : String := "Hello, World!" - Println(S) -end func Greetings -// All declarations are 'const', 'var', or 'ref' -// Assignment is :=, equality checks are ==, and != is not equals - -func Boolean_Examples(B : Bool) is - const And := B and #true // Parallel execution of operands - const And_Then := B and then #true // Short-Circuit - const Or := B or #false // Parallel execution of operands - const Or_Else := B or else #false // Short-Cirtuit - const Xor := B xor #true - var Result : Bool := #true; - Result and= #false; - Result or= #true; - Result xor= #false; -end func Boolean_Examples -// Booleans are a special type of enumeration -// All enumerations are preceded by a sharp '#' - -func Fib(N : Int) {N >= 0} -> Int is - if N <= 1 then - return N - else - // Left and right side of '+' are computed in Parallel here - return Fib(N - 1) + Fib(N - 2) - end if -end func Fib -// '{N >= 0}' is a precondition to this function -// Preconditions are built in to the language and checked by the compiler - -// ParaSail does not have mutable global variables -// Instead, use 'var' parameters -func Increment_All(var Nums : Vector) is - for each Elem of Nums concurrent loop - Elem += 1 - end loop -end func Increment_All -// The 'concurrent' keyword in the loop header tells the compiler that -// iterations of the loop can happen in any order. -// It will choose the most optimal number of threads to use. -// Other options are 'forward' and 'reverse'. - -func Sum_Of_Squares(N : Int) -> Int is - // The type of Sum is inferred - var Sum := 0 - for I in 1 .. N forward loop - Sum += I ** 2 // ** is exponentiation - end loop -end func Sum_Of_Squares - -func Sum_Of(N : Int; Map : func (Int) -> Int) -> Int is - return (for I in 1 .. N => <0> + Map(I)) -end func Sum_Of -// It has functional aspects as well -// Here, we're taking an (Int) -> Int function as a parameter -// and using the inherently parallel map-reduce. 
-// Initial value is enclosed with angle brackets - -func main(Args : Basic_Array) is - Greetings() // Hello World - Println(Fib(5)) // 5 - // Container Comprehension - var Vec : Vector := [for I in 0 .. 10 {I mod 2 == 0} => I ** 2] - // Vec = [0, 4, 16, 36, 64, 100] - Increment_All(Vec) - // Vec = [1, 5, 17, 37, 65, 101] - // '|' is an overloaded operator. - // It's usually used for concatenation or adding to a container - Println("First: " | Vec[1] | ", Last: " | Vec[Length(Vec)]); - // Vectors are 1 indexed, 0 indexed ZVectors are also available - - Println(Sum_Of_Squares(3)) - - // Sum of fibs! - Println(Sum_Of(10, Fib)) -end func main - -// Preceding a type with 'optional' allows it to take the value 'null' -func Divide(A, B, C : Real) -> optional Real is - // Real is the floating point type - const Epsilon := 1.0e-6; - if B in -Epsilon .. Epsilon then - return null - elsif C in -Epsilon .. Epsilon then - return null - else - return A / B + A / C - end if -end func Divide - -// 2. Modules -// Modules are composed of an interface and a class -// ParaSail has object orientation features - -// modules can be defined as 'concurrent' -// which allows 'locked' and 'queued' parameters -concurrent interface Locked_Box> is - // Create a box with the given content - func Create(C : optional Content_Type) -> Locked_Box; - - // Put something into the box - func Put(locked var B : Locked_Box; C : Content_Type); - - // Get a copy of current content - func Content(locked B : Locked_Box) -> optional Content_Type; - - // Remove current content, leaving it null - func Remove(locked var B : Locked_Box) -> optional Content_Type; - - // Wait until content is non-null, then return it, leaving it null. - func Get(queued var B : Locked_Box) -> Content_Type; -end interface Locked_Box; - -concurrent class Locked_Box is - var Content : optional Content_Type; -exports - func Create(C : optional Content_Type) -> Locked_Box is - return (Content => C); - end func Create; - - func Put(locked var B : Locked_Box; C : Content_Type) is - B.Content := C; - end func Put; - - func Content(locked B : Locked_Box) -> optional Content_Type is - return B.Content; - end func Content; - - func Remove(locked var B : Locked_Box) -> Result : optional Content_Type is - // '<==' is the move operator - // It moves the right operand into the left operand, - // leaving the right null. 
- Result <== B.Content; - end func Remove; - - func Get(queued var B : Locked_Box) -> Result : Content_Type is - queued until B.Content not null then - Result <== B.Content; - end func Get; -end class Locked_Box; - -func Use_Box(Seed : Univ_Integer) is - var U_Box : Locked_Box := Create(null); - // The type of 'Ran' can be left out because - // it is inferred from the return type of Random::Start - var Ran := Random::Start(Seed); - - Println("Starting 100 pico-threads trying to put something in the box"); - Println(" or take something out."); - for I in 1..100 concurrent loop - if I < 30 then - Println("Getting out " | Get(U_Box)); - else - Println("Putting in " | I); - U_Box.Put(I); - - // The first parameter can be moved to the front with a dot - // X.Foo(Y) is equivalent to Foo(X, Y) - end if; - end loop; - - Println("And the winner is: " | Remove(U_Box)); - Println("And the box is now " | Content(U_Box)); -end func Use_Box; diff --git a/tests/examplefiles/test.pwn b/tests/examplefiles/test.pwn deleted file mode 100644 index d6468617..00000000 --- a/tests/examplefiles/test.pwn +++ /dev/null @@ -1,253 +0,0 @@ -#include - -// Single line comment -/* Multi line - comment */ - -/// documentation -/** - - documentation multi line - -**/ - -public OnGameModeInit() { - printf("Hello, World!"); -} - -enum info { - Float:ex; - exa, - exam[5], -} -new arr[5][info]; - -stock Float:test_func() -{ - new a = 5, Float:b = 10.3; - if (a == b) { - - } else { - - } - - for (new i = 0; i < 10; i++) { - continue; - } - - do { - a--; - } while (a > 0); - - while (a < 5) { - a++; - break; - } - - switch (a) { - case 0: { - } - case 0..4: { - } - case 5, 6: { - } - } - - static x; - new xx = a > 5 ? 5 : 0; - new array[sizeof arr] = {0}; - tagof a; - state a; - goto label; - new byte[2 char]; - byte{0} = 'a'; - - return (float(a) + b); -} - - -// float.inc -/* Float arithmetic - * - * (c) Copyright 1999, Artran, Inc. - * Written by Greg Garner (gmg@artran.com) - * Modified in March 2001 to include user defined - * operators for the floating point functions. - * - * This file is provided as is (no warranties). 
- */ -#if defined _Float_included - #endinput -#endif -#define _Float_included -#pragma library Float - -/* Different methods of rounding */ -enum floatround_method { - floatround_round, - floatround_floor, - floatround_ceil, - floatround_tozero, - floatround_unbiased -} -enum anglemode { - radian, - degrees, - grades -} - -/**************************************************/ -/* Convert an integer into a floating point value */ -native Float:float(value); - -/**************************************************/ -/* Convert a string into a floating point value */ -native Float:floatstr(const string[]); - -/**************************************************/ -/* Multiple two floats together */ -native Float:floatmul(Float:oper1, Float:oper2); - -/**************************************************/ -/* Divide the dividend float by the divisor float */ -native Float:floatdiv(Float:dividend, Float:divisor); - -/**************************************************/ -/* Add two floats together */ -native Float:floatadd(Float:oper1, Float:oper2); - -/**************************************************/ -/* Subtract oper2 float from oper1 float */ -native Float:floatsub(Float:oper1, Float:oper2); - -/**************************************************/ -/* Return the fractional part of a float */ -native Float:floatfract(Float:value); - -/**************************************************/ -/* Round a float into a integer value */ -native floatround(Float:value, floatround_method:method=floatround_round); - -/**************************************************/ -/* Compare two integers. If the two elements are equal, return 0. - If the first argument is greater than the second argument, return 1, - If the first argument is less than the second argument, return -1. */ -native floatcmp(Float:oper1, Float:oper2); - -/**************************************************/ -/* Return the square root of the input value, same as floatpower(value, 0.5) */ -native Float:floatsqroot(Float:value); - -/**************************************************/ -/* Return the value raised to the power of the exponent */ -native Float:floatpower(Float:value, Float:exponent); - -/**************************************************/ -/* Return the logarithm */ -native Float:floatlog(Float:value, Float:base=10.0); - -/**************************************************/ -/* Return the sine, cosine or tangent. The input angle may be in radian, - degrees or grades. 
*/ -native Float:floatsin(Float:value, anglemode:mode=radian); -native Float:floatcos(Float:value, anglemode:mode=radian); -native Float:floattan(Float:value, anglemode:mode=radian); - -/**************************************************/ -/* Return the absolute value */ -native Float:floatabs(Float:value); - - -/**************************************************/ -#pragma rational Float - -/* user defined operators */ -native Float:operator*(Float:oper1, Float:oper2) = floatmul; -native Float:operator/(Float:oper1, Float:oper2) = floatdiv; -native Float:operator+(Float:oper1, Float:oper2) = floatadd; -native Float:operator-(Float:oper1, Float:oper2) = floatsub; -native Float:operator=(oper) = float; - -stock Float:operator++(Float:oper) - return oper+1.0; - -stock Float:operator--(Float:oper) - return oper-1.0; - -stock Float:operator-(Float:oper) - return oper^Float:cellmin; /* IEEE values are sign/magnitude */ - -stock Float:operator*(Float:oper1, oper2) - return floatmul(oper1, float(oper2)); /* "*" is commutative */ - -stock Float:operator/(Float:oper1, oper2) - return floatdiv(oper1, float(oper2)); - -stock Float:operator/(oper1, Float:oper2) - return floatdiv(float(oper1), oper2); - -stock Float:operator+(Float:oper1, oper2) - return floatadd(oper1, float(oper2)); /* "+" is commutative */ - -stock Float:operator-(Float:oper1, oper2) - return floatsub(oper1, float(oper2)); - -stock Float:operator-(oper1, Float:oper2) - return floatsub(float(oper1), oper2); - -stock bool:operator==(Float:oper1, Float:oper2) - return floatcmp(oper1, oper2) == 0; - -stock bool:operator==(Float:oper1, oper2) - return floatcmp(oper1, float(oper2)) == 0; /* "==" is commutative */ - -stock bool:operator!=(Float:oper1, Float:oper2) - return floatcmp(oper1, oper2) != 0; - -stock bool:operator!=(Float:oper1, oper2) - return floatcmp(oper1, float(oper2)) != 0; /* "!=" is commutative */ - -stock bool:operator>(Float:oper1, Float:oper2) - return floatcmp(oper1, oper2) > 0; - -stock bool:operator>(Float:oper1, oper2) - return floatcmp(oper1, float(oper2)) > 0; - -stock bool:operator>(oper1, Float:oper2) - return floatcmp(float(oper1), oper2) > 0; - -stock bool:operator>=(Float:oper1, Float:oper2) - return floatcmp(oper1, oper2) >= 0; - -stock bool:operator>=(Float:oper1, oper2) - return floatcmp(oper1, float(oper2)) >= 0; - -stock bool:operator>=(oper1, Float:oper2) - return floatcmp(float(oper1), oper2) >= 0; - -stock bool:operator<(Float:oper1, Float:oper2) - return floatcmp(oper1, oper2) < 0; - -stock bool:operator<(Float:oper1, oper2) - return floatcmp(oper1, float(oper2)) < 0; - -stock bool:operator<(oper1, Float:oper2) - return floatcmp(float(oper1), oper2) < 0; - -stock bool:operator<=(Float:oper1, Float:oper2) - return floatcmp(oper1, oper2) <= 0; - -stock bool:operator<=(Float:oper1, oper2) - return floatcmp(oper1, float(oper2)) <= 0; - -stock bool:operator<=(oper1, Float:oper2) - return floatcmp(float(oper1), oper2) <= 0; - -stock bool:operator!(Float:oper) - return (_:oper & cellmax) == 0; - -/* forbidden operations */ -forward operator%(Float:oper1, Float:oper2); -forward operator%(Float:oper1, oper2); -forward operator%(oper1, Float:oper2); - diff --git a/tests/examplefiles/test.pypylog b/tests/examplefiles/test.pypylog deleted file mode 100644 index 1a6aa5ed..00000000 --- a/tests/examplefiles/test.pypylog +++ /dev/null @@ -1,1000 +0,0 @@ -[5ed621f277b8] {jit-backend-counts -[5ed621f309bc] jit-backend-counts} -[5ed622c957b0] {jit-log-opt-loop -# Loop 0 : loop with 145 ops -[p0, p1, p2, p3, p4, p5, p6, 
p7, p8, p9, p10, f11, p12, p13, p14, p15, i16, f17, i18, i19, i20, i21, i22, i23, i24, i25, i26, f27, i28, f29, f30] -debug_merge_point(' #125 FOR_ITER', 0) -i32 = int_gt(i18, 0) -guard_true(i32, descr=) [p1, p0, p5, p2, p3, p4, p6, p7, p8, p9, p10, p12, p13, p14, p15, i16, f17, f11] -i33 = int_add(i19, i20) -i35 = int_sub(i18, 1) -debug_merge_point(' #128 STORE_FAST', 0) -debug_merge_point(' #131 LOAD_FAST', 0) -debug_merge_point(' #134 LOAD_FAST', 0) -debug_merge_point(' #137 LOAD_FAST', 0) -debug_merge_point(' #140 BINARY_MULTIPLY', 0) -setfield_gc(p5, i33, descr=) -setfield_gc(p5, i35, descr=) -i36 = int_mul_ovf(i21, i22) -guard_no_overflow(, descr=) [p1, p0, p12, p15, i36, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p13, i19, None, f17, f11] -debug_merge_point(' #141 LOAD_FAST', 0) -debug_merge_point(' #144 BINARY_ADD', 0) -i38 = int_add_ovf(i36, i19) -guard_no_overflow(, descr=) [p1, p0, i38, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p12, p13, p15, i36, i19, None, f17, f11] -debug_merge_point(' #145 BINARY_SUBSCR', 0) -i40 = int_lt(i38, 0) -guard_false(i40, descr=) [p1, p0, p14, i38, i23, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, None, i19, None, f17, f11] -i41 = int_lt(i38, i23) -guard_true(i41, descr=) [p1, p0, p14, i38, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, None, i19, None, f17, f11] -f42 = getarrayitem_raw(i24, i38, descr=) -debug_merge_point(' #146 STORE_FAST', 0) -debug_merge_point(' #149 LOAD_FAST', 0) -debug_merge_point(' #152 LOAD_FAST', 0) -debug_merge_point(' #155 LOAD_CONST', 0) -debug_merge_point(' #158 BINARY_SUBTRACT', 0) -debug_merge_point(' #159 LOAD_FAST', 0) -debug_merge_point(' #162 BINARY_MULTIPLY', 0) -debug_merge_point(' #163 LOAD_FAST', 0) -debug_merge_point(' #166 BINARY_ADD', 0) -i43 = int_add_ovf(i25, i19) -guard_no_overflow(, descr=) [p1, p0, i43, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p12, p13, p15, f42, i25, None, i19, None, None, f11] -debug_merge_point(' #167 BINARY_SUBSCR', 0) -i45 = int_lt(i43, 0) -guard_false(i45, descr=) [p1, p0, p14, i43, i23, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, f42, None, None, i19, None, None, f11] -i46 = int_lt(i43, i23) -guard_true(i46, descr=) [p1, p0, p14, i43, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, f42, None, None, i19, None, None, f11] -f47 = getarrayitem_raw(i24, i43, descr=) -debug_merge_point(' #168 LOAD_FAST', 0) -debug_merge_point(' #171 LOAD_FAST', 0) -debug_merge_point(' #174 LOAD_CONST', 0) -debug_merge_point(' #177 BINARY_ADD', 0) -debug_merge_point(' #178 LOAD_FAST', 0) -debug_merge_point(' #181 BINARY_MULTIPLY', 0) -debug_merge_point(' #182 LOAD_FAST', 0) -debug_merge_point(' #185 BINARY_ADD', 0) -i48 = int_add_ovf(i26, i19) -guard_no_overflow(, descr=) [p1, p0, i48, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p12, p13, p15, i26, f47, f42, None, None, i19, None, None, f11] -debug_merge_point(' #186 BINARY_SUBSCR', 0) -i50 = int_lt(i48, 0) -guard_false(i50, descr=) [p1, p0, p14, i48, i23, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, None, f47, f42, None, None, i19, None, None, f11] -i51 = int_lt(i48, i23) -guard_true(i51, descr=) [p1, p0, p14, i48, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, None, f47, f42, None, None, i19, None, None, f11] -f52 = getarrayitem_raw(i24, i48, descr=) -debug_merge_point(' #187 BINARY_ADD', 0) -f53 = float_add(f47, f52) -debug_merge_point(' #188 LOAD_FAST', 0) -debug_merge_point(' #191 BINARY_MULTIPLY', 0) -f54 = float_mul(f53, f27) -debug_merge_point(' #192 LOAD_FAST', 0) -debug_merge_point(' #195 LOAD_FAST', 0) 
-debug_merge_point(' #198 LOAD_FAST', 0) -debug_merge_point(' #201 BINARY_MULTIPLY', 0) -debug_merge_point(' #202 LOAD_FAST', 0) -debug_merge_point(' #205 BINARY_ADD', 0) -i55 = int_add_ovf(i28, i19) -guard_no_overflow(, descr=) [p1, p0, i55, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p12, p13, p15, f54, i28, None, None, f42, None, None, i19, None, None, f11] -debug_merge_point(' #206 LOAD_CONST', 0) -debug_merge_point(' #209 BINARY_SUBTRACT', 0) -i57 = int_sub_ovf(i55, 1) -guard_no_overflow(, descr=) [p1, p0, i57, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p12, p13, p15, i55, f54, None, None, None, f42, None, None, i19, None, None, f11] -debug_merge_point(' #210 BINARY_SUBSCR', 0) -i59 = int_lt(i57, 0) -guard_false(i59, descr=) [p1, p0, p14, i57, i23, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, None, f54, None, None, None, f42, None, None, i19, None, None, f11] -i60 = int_lt(i57, i23) -guard_true(i60, descr=) [p1, p0, p14, i57, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, None, f54, None, None, None, f42, None, None, i19, None, None, f11] -f61 = getarrayitem_raw(i24, i57, descr=) -debug_merge_point(' #211 LOAD_FAST', 0) -debug_merge_point(' #214 LOAD_FAST', 0) -debug_merge_point(' #217 LOAD_FAST', 0) -debug_merge_point(' #220 BINARY_MULTIPLY', 0) -debug_merge_point(' #221 LOAD_FAST', 0) -debug_merge_point(' #224 BINARY_ADD', 0) -debug_merge_point(' #225 LOAD_CONST', 0) -debug_merge_point(' #228 BINARY_ADD', 0) -i63 = int_add_ovf(i55, 1) -guard_no_overflow(, descr=) [p1, p0, i63, p2, p3, p4, p5, p14, p6, p7, p8, p9, p10, p12, p13, p15, f61, i55, f54, None, None, None, f42, None, None, i19, None, None, f11] -debug_merge_point(' #229 BINARY_SUBSCR', 0) -i64 = int_lt(i63, i23) -guard_true(i64, descr=) [p1, p0, p14, i63, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, f61, None, f54, None, None, None, f42, None, None, i19, None, None, f11] -f65 = getarrayitem_raw(i24, i63, descr=) -debug_merge_point(' #230 BINARY_ADD', 0) -f66 = float_add(f61, f65) -debug_merge_point(' #231 LOAD_FAST', 0) -debug_merge_point(' #234 BINARY_MULTIPLY', 0) -f67 = float_mul(f66, f29) -debug_merge_point(' #235 BINARY_ADD', 0) -f68 = float_add(f54, f67) -debug_merge_point(' #236 LOAD_FAST', 0) -debug_merge_point(' #239 BINARY_MULTIPLY', 0) -f69 = float_mul(f68, f30) -debug_merge_point(' #240 LOAD_FAST', 0) -debug_merge_point(' #243 LOAD_FAST', 0) -debug_merge_point(' #246 LOAD_FAST', 0) -debug_merge_point(' #249 BINARY_MULTIPLY', 0) -debug_merge_point(' #250 LOAD_FAST', 0) -debug_merge_point(' #253 BINARY_ADD', 0) -debug_merge_point(' #254 STORE_SUBSCR', 0) -i70 = int_lt(i55, i23) -guard_true(i70, descr=) [p1, p0, p14, i55, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p15, f69, None, None, None, None, None, None, f42, None, None, i19, None, None, f11] -setarrayitem_raw(i24, i55, f69, descr=) -debug_merge_point(' #255 LOAD_FAST', 0) -debug_merge_point(' #258 LOAD_GLOBAL', 0) -debug_merge_point(' #261 LOAD_FAST', 0) -debug_merge_point(' #264 LOAD_FAST', 0) -debug_merge_point(' #267 LOAD_FAST', 0) -debug_merge_point(' #270 BINARY_MULTIPLY', 0) -debug_merge_point(' #271 LOAD_FAST', 0) -debug_merge_point(' #274 BINARY_ADD', 0) -debug_merge_point(' #275 BINARY_SUBSCR', 0) -f71 = getarrayitem_raw(i24, i55, descr=) -debug_merge_point(' #276 LOAD_FAST', 0) -debug_merge_point(' #279 BINARY_SUBTRACT', 0) -f72 = float_sub(f71, f42) -debug_merge_point(' #280 CALL_FUNCTION', 0) -i73 = force_token() -debug_merge_point(' #0 LOAD_FAST', 1) -debug_merge_point(' #3 LOAD_FAST', 1) -debug_merge_point(' #6 
BINARY_MULTIPLY', 1) -f74 = float_mul(f72, f72) -debug_merge_point(' #7 RETURN_VALUE', 1) -debug_merge_point(' #283 INPLACE_ADD', 0) -f75 = float_add(f11, f74) -debug_merge_point(' #284 STORE_FAST', 0) -debug_merge_point(' #287 JUMP_ABSOLUTE', 0) -i77 = getfield_raw(38968960, descr=) -i79 = int_sub(i77, 26) -setfield_raw(38968960, i79, descr=) -i81 = int_lt(i79, 0) -guard_false(i81, descr=) [p1, p0, p2, p3, p4, p5, p6, p7, p8, p9, p10, p12, p13, p14, p15, f75, None, None, None, None, None, None, None, f42, None, None, i19, None, None, None] -debug_merge_point(' #125 FOR_ITER', 0) -jump(p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, f75, p12, p13, p14, p15, i19, f42, i35, i33, i20, i21, i22, i23, i24, i25, i26, f27, i36, f29, f30, descr=) -[5ed622d5187e] jit-log-opt-loop} -[5ed622e116d0] {jit-log-opt-loop -# Loop 1 : entry bridge with 188 ops -[p0, p1, p2, p3, i4, p5, i6, i7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24, p25, p26] -debug_merge_point(' #125 FOR_ITER', 0) -guard_value(i4, 2, descr=) [i4, p1, p0, p2, p3, p5, i6, i7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24, p25, p26] -guard_class(p9, 19861240, descr=) [p1, p0, p9, p2, p3, p5, i6, p8, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24, p25, p26] -i29 = getfield_gc(p9, descr=) -i31 = int_gt(i29, 0) -guard_true(i31, descr=) [p1, p0, p9, p2, p3, p5, i6, p8, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24, p25, p26] -i32 = getfield_gc(p9, descr=) -i33 = getfield_gc(p9, descr=) -i34 = int_add(i32, i33) -i36 = int_sub(i29, 1) -setfield_gc(p9, i34, descr=) -setfield_gc(p9, i36, descr=) -guard_value(i6, 0, descr=) [i6, p1, p0, p2, p3, p5, p8, p9, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24, p25, p26, i32] -debug_merge_point(' #128 STORE_FAST', 0) -debug_merge_point(' #131 LOAD_FAST', 0) -guard_nonnull_class(p23, 19886912, descr=) [p1, p0, p23, p2, p3, p5, p8, p9, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, p26, i32] -debug_merge_point(' #134 LOAD_FAST', 0) -guard_nonnull_class(p24, ConstClass(W_IntObject), descr=) [p1, p0, p24, p2, p3, p5, p8, p9, p23, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p26, i32] -debug_merge_point(' #137 LOAD_FAST', 0) -guard_nonnull_class(p21, ConstClass(W_IntObject), descr=) [p1, p0, p21, p2, p3, p5, p8, p9, p23, p24, p12, p13, p14, p15, p16, p17, p18, p19, p20, p22, p26, i32] -debug_merge_point(' #140 BINARY_MULTIPLY', 0) -i41 = getfield_gc_pure(p24, descr=) -i42 = getfield_gc_pure(p21, descr=) -i43 = int_mul_ovf(i41, i42) -guard_no_overflow(, descr=) [p1, p0, p21, p24, i43, p2, p3, p5, p8, p9, p23, p13, p14, p15, p16, p17, p18, p19, p20, p22, p26, i32] -debug_merge_point(' #141 LOAD_FAST', 0) -debug_merge_point(' #144 BINARY_ADD', 0) -i44 = int_add_ovf(i43, i32) -guard_no_overflow(, descr=) [p1, p0, i44, p2, p3, p5, p8, p9, p23, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, p26, i43, i32] -debug_merge_point(' #145 BINARY_SUBSCR', 0) -i45 = getfield_gc(p23, descr=) -i47 = int_lt(i44, 0) -guard_false(i47, descr=) [p1, p0, p23, i44, i45, p2, p3, p5, p8, p9, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, p26, None, i32] -i49 = int_lt(i44, i45) -guard_true(i49, descr=) [p1, p0, p23, i44, p2, p3, p5, p8, p9, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, p26, None, i32] -i50 = getfield_gc(p23, descr=) -f51 = getarrayitem_raw(i50, i44, descr=) -debug_merge_point(' #146 STORE_FAST', 0) 
-debug_merge_point(' #149 LOAD_FAST', 0) -debug_merge_point(' #152 LOAD_FAST', 0) -debug_merge_point(' #155 LOAD_CONST', 0) -guard_value(p2, ConstPtr(ptr52), descr=) [p1, p0, p2, p3, p5, p8, p9, p23, p24, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, f51, None, i32] -debug_merge_point(' #158 BINARY_SUBTRACT', 0) -i54 = int_sub_ovf(i41, 1) -guard_no_overflow(, descr=) [p1, p0, p24, i54, p3, p5, p8, p9, p23, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, f51, None, i32] -debug_merge_point(' #159 LOAD_FAST', 0) -debug_merge_point(' #162 BINARY_MULTIPLY', 0) -i55 = int_mul_ovf(i54, i42) -guard_no_overflow(, descr=) [p1, p0, p21, i55, p3, p5, p8, p9, p23, p13, p14, p15, p16, p17, p18, p19, p20, p22, p24, i54, f51, None, i32] -debug_merge_point(' #163 LOAD_FAST', 0) -debug_merge_point(' #166 BINARY_ADD', 0) -i56 = int_add_ovf(i55, i32) -guard_no_overflow(, descr=) [p1, p0, i56, p3, p5, p8, p9, p23, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, i55, None, f51, None, i32] -debug_merge_point(' #167 BINARY_SUBSCR', 0) -i58 = int_lt(i56, 0) -guard_false(i58, descr=) [p1, p0, p23, i56, i45, p3, p5, p8, p9, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, None, None, f51, None, i32] -i59 = int_lt(i56, i45) -guard_true(i59, descr=) [p1, p0, p23, i56, p3, p5, p8, p9, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, None, None, f51, None, i32] -f60 = getarrayitem_raw(i50, i56, descr=) -debug_merge_point(' #168 LOAD_FAST', 0) -debug_merge_point(' #171 LOAD_FAST', 0) -debug_merge_point(' #174 LOAD_CONST', 0) -debug_merge_point(' #177 BINARY_ADD', 0) -i62 = int_add_ovf(i41, 1) -guard_no_overflow(, descr=) [p1, p0, p24, i62, p3, p5, p8, p9, p23, p14, p15, p16, p17, p18, p19, p20, p21, p22, f60, None, None, f51, None, i32] -debug_merge_point(' #178 LOAD_FAST', 0) -debug_merge_point(' #181 BINARY_MULTIPLY', 0) -i63 = int_mul_ovf(i62, i42) -guard_no_overflow(, descr=) [p1, p0, p21, i63, p3, p5, p8, p9, p23, p14, p15, p16, p17, p18, p19, p20, p22, p24, i62, f60, None, None, f51, None, i32] -debug_merge_point(' #182 LOAD_FAST', 0) -debug_merge_point(' #185 BINARY_ADD', 0) -i64 = int_add_ovf(i63, i32) -guard_no_overflow(, descr=) [p1, p0, i64, p3, p5, p8, p9, p23, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, i63, None, f60, None, None, f51, None, i32] -debug_merge_point(' #186 BINARY_SUBSCR', 0) -i66 = int_lt(i64, 0) -guard_false(i66, descr=) [p1, p0, p23, i64, i45, p3, p5, p8, p9, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, None, None, f60, None, None, f51, None, i32] -i67 = int_lt(i64, i45) -guard_true(i67, descr=) [p1, p0, p23, i64, p3, p5, p8, p9, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, None, None, f60, None, None, f51, None, i32] -f68 = getarrayitem_raw(i50, i64, descr=) -debug_merge_point(' #187 BINARY_ADD', 0) -f69 = float_add(f60, f68) -debug_merge_point(' #188 LOAD_FAST', 0) -guard_nonnull_class(p18, 19800744, descr=) [p1, p0, p18, p3, p5, p8, p9, p14, p15, p16, p17, p19, p20, p21, p22, p23, p24, f69, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #191 BINARY_MULTIPLY', 0) -f71 = getfield_gc_pure(p18, descr=) -f72 = float_mul(f69, f71) -debug_merge_point(' #192 LOAD_FAST', 0) -debug_merge_point(' #195 LOAD_FAST', 0) -debug_merge_point(' #198 LOAD_FAST', 0) -debug_merge_point(' #201 BINARY_MULTIPLY', 0) -debug_merge_point(' #202 LOAD_FAST', 0) -debug_merge_point(' #205 BINARY_ADD', 0) -debug_merge_point(' #206 LOAD_CONST', 0) -debug_merge_point(' #209 BINARY_SUBTRACT', 0) -i74 = int_sub(i44, 1) -debug_merge_point(' #210 BINARY_SUBSCR', 0) 
-i76 = int_lt(i74, 0) -guard_false(i76, descr=) [p1, p0, p23, i74, i45, p3, p5, p8, p9, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, f72, None, None, None, None, None, None, f51, None, i32] -i77 = int_lt(i74, i45) -guard_true(i77, descr=) [p1, p0, p23, i74, p3, p5, p8, p9, p14, p15, p16, p17, p18, p19, p20, p21, p22, p24, f72, None, None, None, None, None, None, f51, None, i32] -f78 = getarrayitem_raw(i50, i74, descr=) -debug_merge_point(' #211 LOAD_FAST', 0) -debug_merge_point(' #214 LOAD_FAST', 0) -debug_merge_point(' #217 LOAD_FAST', 0) -debug_merge_point(' #220 BINARY_MULTIPLY', 0) -debug_merge_point(' #221 LOAD_FAST', 0) -debug_merge_point(' #224 BINARY_ADD', 0) -debug_merge_point(' #225 LOAD_CONST', 0) -debug_merge_point(' #228 BINARY_ADD', 0) -i80 = int_add(i44, 1) -debug_merge_point(' #229 BINARY_SUBSCR', 0) -i81 = int_lt(i80, i45) -guard_true(i81, descr=) [p1, p0, p23, i80, p3, p5, p8, p9, p15, p16, p17, p18, p19, p20, p21, p22, p24, f78, f72, None, None, None, None, None, None, f51, None, i32] -f82 = getarrayitem_raw(i50, i80, descr=) -debug_merge_point(' #230 BINARY_ADD', 0) -f83 = float_add(f78, f82) -debug_merge_point(' #231 LOAD_FAST', 0) -guard_nonnull_class(p17, 19800744, descr=) [p1, p0, p17, p3, p5, p8, p9, p15, p16, p18, p19, p20, p21, p22, p23, p24, f83, None, f72, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #234 BINARY_MULTIPLY', 0) -f85 = getfield_gc_pure(p17, descr=) -f86 = float_mul(f83, f85) -debug_merge_point(' #235 BINARY_ADD', 0) -f87 = float_add(f72, f86) -debug_merge_point(' #236 LOAD_FAST', 0) -guard_nonnull_class(p19, 19800744, descr=) [p1, p0, p19, p3, p5, p8, p9, p15, p16, p17, p18, p20, p21, p22, p23, p24, f87, None, None, None, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #239 BINARY_MULTIPLY', 0) -f89 = getfield_gc_pure(p19, descr=) -f90 = float_mul(f87, f89) -debug_merge_point(' #240 LOAD_FAST', 0) -debug_merge_point(' #243 LOAD_FAST', 0) -debug_merge_point(' #246 LOAD_FAST', 0) -debug_merge_point(' #249 BINARY_MULTIPLY', 0) -debug_merge_point(' #250 LOAD_FAST', 0) -debug_merge_point(' #253 BINARY_ADD', 0) -debug_merge_point(' #254 STORE_SUBSCR', 0) -setarrayitem_raw(i50, i44, f90, descr=) -debug_merge_point(' #255 LOAD_FAST', 0) -guard_nonnull_class(p20, 19800744, descr=) [p1, p0, p20, p3, p5, p8, p9, p15, p16, p17, p18, p19, p21, p22, p23, p24, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #258 LOAD_GLOBAL', 0) -p92 = getfield_gc(p0, descr=) -guard_value(p92, ConstPtr(ptr93), descr=) [p1, p0, p92, p3, p5, p8, p9, p20, p15, p16, p17, p18, p19, p21, p22, p23, p24, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -p94 = getfield_gc(p92, descr=) -guard_isnull(p94, descr=) [p1, p0, p94, p92, p3, p5, p8, p9, p20, p15, p16, p17, p18, p19, p21, p22, p23, p24, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -p96 = getfield_gc(ConstPtr(ptr95), descr=) -guard_nonnull_class(p96, ConstClass(Function), descr=) [p1, p0, p96, p3, p5, p8, p9, p20, p15, p16, p17, p18, p19, p21, p22, p23, p24, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #261 LOAD_FAST', 0) -debug_merge_point(' #264 LOAD_FAST', 0) -debug_merge_point(' #267 LOAD_FAST', 0) -debug_merge_point(' #270 BINARY_MULTIPLY', 0) -debug_merge_point(' #271 LOAD_FAST', 0) -debug_merge_point(' #274 BINARY_ADD', 0) -debug_merge_point(' #275 BINARY_SUBSCR', 0) -f98 = getarrayitem_raw(i50, i44, descr=) 
-debug_merge_point(' #276 LOAD_FAST', 0) -debug_merge_point(' #279 BINARY_SUBTRACT', 0) -f99 = float_sub(f98, f51) -debug_merge_point(' #280 CALL_FUNCTION', 0) -p100 = getfield_gc(p96, descr=) -guard_value(p100, ConstPtr(ptr101), descr=) [p1, p0, p100, p96, p3, p5, p8, p9, p20, p15, p16, p17, p18, p19, p21, p22, p23, p24, f99, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -p102 = getfield_gc(p96, descr=) -p103 = getfield_gc(p96, descr=) -p105 = call(ConstClass(getexecutioncontext), descr=) -p106 = getfield_gc(p105, descr=) -i107 = force_token() -p108 = getfield_gc(p105, descr=) -guard_isnull(p108, descr=) [p1, p0, p105, p108, p3, p5, p8, p9, p20, p96, p15, p16, p17, p18, p19, p21, p22, p23, p24, p106, p102, i107, f99, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -i109 = getfield_gc(p105, descr=) -i110 = int_is_zero(i109) -guard_true(i110, descr=) [p1, p0, p105, p3, p5, p8, p9, p20, p96, p15, p16, p17, p18, p19, p21, p22, p23, p24, p106, p102, i107, f99, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #0 LOAD_FAST', 1) -debug_merge_point(' #3 LOAD_FAST', 1) -debug_merge_point(' #6 BINARY_MULTIPLY', 1) -f111 = float_mul(f99, f99) -debug_merge_point(' #7 RETURN_VALUE', 1) -i112 = int_is_true(i109) -guard_false(i112, descr=) [p1, p0, p105, p3, p5, p8, p9, p20, p96, p15, p16, p17, p18, p19, p21, p22, p23, p24, f111, p106, p102, i107, f99, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #283 INPLACE_ADD', 0) -f113 = getfield_gc_pure(p20, descr=) -f114 = float_add(f113, f111) -debug_merge_point(' #284 STORE_FAST', 0) -debug_merge_point(' #287 JUMP_ABSOLUTE', 0) -i116 = getfield_raw(38968960, descr=) -i118 = int_sub(i116, 26) -setfield_raw(38968960, i118, descr=) -i120 = int_lt(i118, 0) -guard_false(i120, descr=) [p1, p0, p3, p5, p8, p9, p15, p16, p17, p18, p19, p21, p22, p23, p24, f114, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, f51, None, i32] -debug_merge_point(' #125 FOR_ITER', 0) -jump(p0, p1, p3, p5, p8, p9, p15, p16, p17, p18, p19, f114, p21, p22, p23, p24, i32, f51, i36, i34, i33, i41, i42, i45, i50, i55, i63, f71, i43, f85, f89, descr=) -[5ed622ea316e] jit-log-opt-loop} -[5ed62326a846] {jit-log-opt-bridge -# bridge out of Guard 21 with 13 ops -[p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, f15, f16, i17] -i18 = force_token() -setfield_gc(p1, i18, descr=) -call_may_force(ConstClass(action_dispatcher), p0, p1, descr=) -guard_not_forced(, descr=) [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, i17, f16, f15] -guard_no_exception(, descr=) [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, i17, f16, f15] -debug_merge_point(' #125 FOR_ITER', 0) -p21 = new_with_vtable(19800744) -setfield_gc(p21, f15, descr=) -p23 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p23, i17, descr=) -p25 = new_with_vtable(19800744) -setfield_gc(p25, f16, descr=) -jump(p1, p0, ConstPtr(ptr26), p2, 2, p3, 0, 125, p4, p5, ConstPtr(ptr30), ConstPtr(ptr31), ConstPtr(ptr32), ConstPtr(ptr33), ConstPtr(ptr34), p6, p7, p8, p9, p10, p21, p11, p12, p13, p14, p23, p25, descr=) -[5ed62327d096] jit-log-opt-bridge} -[5ed623eb929c] {jit-log-opt-bridge -# bridge out of Guard 3 with 260 ops -[p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, i15, f16, f17] -debug_merge_point(' #290 POP_BLOCK', 0) -p18 = getfield_gc(p3, descr=) -guard_class(p3, 19865144, 
descr=) [p0, p1, p3, p18, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, i15, f16, f17] -i20 = getfield_gc(p3, descr=) -guard_value(i20, 1, descr=) [p0, p1, i20, p18, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, i15, f16, f17] -debug_merge_point(' #291 JUMP_ABSOLUTE', 0) -i23 = getfield_raw(38968960, descr=) -i25 = int_sub(i23, 1) -setfield_raw(38968960, i25, descr=) -i27 = int_lt(i25, 0) -guard_false(i27, descr=) [p0, p1, p18, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, i15, f16, f17] -debug_merge_point(' #99 FOR_ITER', 0) -guard_class(p5, 19861240, descr=) [p0, p1, p5, p18, p4, p6, p7, p8, p9, p10, p11, p12, p13, p14, i15, f16, f17] -i29 = getfield_gc(p5, descr=) -i31 = int_gt(i29, 0) -guard_true(i31, descr=) [p0, p1, p5, p18, p4, p6, p7, p8, p9, p10, p11, p12, p13, p14, i15, f16, f17] -i32 = getfield_gc(p5, descr=) -i33 = getfield_gc(p5, descr=) -i34 = int_add(i32, i33) -i36 = int_sub(i29, 1) -debug_merge_point(' #102 STORE_FAST', 0) -debug_merge_point(' #105 SETUP_LOOP', 0) -debug_merge_point(' #108 LOAD_GLOBAL', 0) -p37 = getfield_gc(p1, descr=) -setfield_gc(p5, i34, descr=) -setfield_gc(p5, i36, descr=) -guard_value(p37, ConstPtr(ptr38), descr=) [p0, p1, p37, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i32, p18, i15, f16, f17] -p39 = getfield_gc(p37, descr=) -guard_isnull(p39, descr=) [p0, p1, p39, p37, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i32, p18, i15, f16, f17] -p41 = getfield_gc(ConstPtr(ptr40), descr=) -guard_isnull(p41, descr=) [p0, p1, p41, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i32, p18, i15, f16, f17] -p43 = getfield_gc(ConstPtr(ptr42), descr=) -guard_value(p43, ConstPtr(ptr44), descr=) [p0, p1, p43, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i32, p18, i15, f16, f17] -p45 = getfield_gc(p43, descr=) -guard_isnull(p45, descr=) [p0, p1, p45, p43, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i32, p18, i15, f16, f17] -p47 = getfield_gc(ConstPtr(ptr46), descr=) -guard_value(p47, ConstPtr(ptr48), descr=) [p0, p1, p47, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i32, p18, i15, f16, f17] -debug_merge_point(' #111 LOAD_CONST', 0) -debug_merge_point(' #114 LOAD_FAST', 0) -guard_nonnull_class(p12, ConstClass(W_IntObject), descr=) [p0, p1, p12, p4, p5, p47, p6, p7, p8, p9, p10, p11, p13, i32, p18, i15, f16, f17] -debug_merge_point(' #117 LOAD_CONST', 0) -debug_merge_point(' #120 BINARY_SUBTRACT', 0) -i50 = getfield_gc_pure(p12, descr=) -i52 = int_sub_ovf(i50, 1) -guard_no_overflow(, descr=) [p0, p1, p12, i52, p4, p5, p47, p6, p7, p8, p9, p10, p11, p13, i32, p18, i15, f16, f17] -debug_merge_point(' #121 CALL_FUNCTION', 0) -p54 = getfield_gc(ConstPtr(ptr53), descr=) -p55 = getfield_gc(ConstPtr(ptr53), descr=) -i56 = getfield_gc_pure(p55, descr=) -guard_false(i56, descr=) [p0, p1, p54, p55, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i52, i32, p18, i15, f16, f17] -p57 = getfield_gc_pure(p55, descr=) -i58 = arraylen_gc(p57, descr=) -i60 = int_sub(4, i58) -i62 = int_ge(3, i60) -guard_true(i62, descr=) [p0, p1, p54, i60, p55, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i52, i32, p18, i15, f16, f17] -i63 = int_sub(3, i60) -i64 = getfield_gc_pure(p55, descr=) -guard_false(i64, descr=) [p0, p1, p54, i63, i60, p55, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i52, i32, p18, i15, f16, f17] -p65 = getfield_gc_pure(p55, descr=) -p66 = getarrayitem_gc(p65, i63, descr=) -guard_class(p66, ConstClass(W_IntObject), descr=) [p0, p1, p66, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i52, i32, p18, i15, f16, f17] -i68 = getfield_gc_pure(p66, descr=) -i69 = int_is_zero(i68) 
-guard_false(i69, descr=) [p0, p1, i68, i52, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p66, None, i32, p18, i15, f16, f17] -i72 = int_lt(i68, 0) -guard_false(i72, descr=) [p0, p1, i68, i52, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p66, None, i32, p18, i15, f16, f17] -i74 = int_lt(1, i52) -guard_true(i74, descr=) [p0, p1, i68, i52, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p66, None, i32, p18, i15, f16, f17] -i75 = int_sub(i52, 1) -i77 = int_sub(i75, 1) -i78 = uint_floordiv(i77, i68) -i80 = int_add(i78, 1) -i82 = int_lt(i80, 0) -guard_false(i82, descr=) [p0, p1, i68, i80, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p66, i52, i32, p18, i15, f16, f17] -debug_merge_point(' #124 GET_ITER', 0) -debug_merge_point(' #125 FOR_ITER', 0) -i84 = int_gt(i80, 0) -guard_true(i84, descr=) [p0, p1, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i80, i68, None, None, i32, p18, i15, f16, f17] -i85 = int_add(1, i68) -debug_merge_point(' #128 STORE_FAST', 0) -debug_merge_point(' #131 LOAD_FAST', 0) -guard_nonnull_class(p13, 19886912, descr=) [p0, p1, p13, p4, p5, p6, p7, p8, p9, p10, p11, p12, i78, i85, None, i68, None, None, i32, p18, None, f16, f17] -debug_merge_point(' #134 LOAD_FAST', 0) -debug_merge_point(' #137 LOAD_FAST', 0) -guard_nonnull_class(p11, ConstClass(W_IntObject), descr=) [p0, p1, p11, p4, p5, p13, p6, p7, p8, p9, p10, p12, i78, i85, None, i68, None, None, i32, p18, None, f16, f17] -debug_merge_point(' #140 BINARY_MULTIPLY', 0) -i88 = getfield_gc_pure(p11, descr=) -i89 = int_mul_ovf(i32, i88) -guard_no_overflow(, descr=) [p0, p1, p11, i89, p4, p5, p13, p6, p7, p8, p9, p10, p12, i78, i85, None, i68, None, None, i32, p18, None, f16, f17] -debug_merge_point(' #141 LOAD_FAST', 0) -debug_merge_point(' #144 BINARY_ADD', 0) -i90 = int_add_ovf(i89, 1) -guard_no_overflow(, descr=) [p0, p1, i90, p4, p5, p13, p6, p7, p8, p9, p10, p11, p12, i89, i78, i85, None, i68, None, None, i32, p18, None, f16, f17] -debug_merge_point(' #145 BINARY_SUBSCR', 0) -i91 = getfield_gc(p13, descr=) -i93 = int_lt(i90, 0) -guard_false(i93, descr=) [p0, p1, p13, i90, i91, p4, p5, p6, p7, p8, p9, p10, p11, p12, None, i78, i85, None, i68, None, None, i32, p18, None, f16, f17] -i94 = int_lt(i90, i91) -guard_true(i94, descr=) [p0, p1, p13, i90, p4, p5, p6, p7, p8, p9, p10, p11, p12, None, i78, i85, None, i68, None, None, i32, p18, None, f16, f17] -i95 = getfield_gc(p13, descr=) -f96 = getarrayitem_raw(i95, i90, descr=) -debug_merge_point(' #146 STORE_FAST', 0) -debug_merge_point(' #149 LOAD_FAST', 0) -debug_merge_point(' #152 LOAD_FAST', 0) -debug_merge_point(' #155 LOAD_CONST', 0) -debug_merge_point(' #158 BINARY_SUBTRACT', 0) -i98 = int_sub_ovf(i32, 1) -guard_no_overflow(, descr=) [p0, p1, i98, p4, p5, p13, p6, p7, p8, p9, p10, p11, p12, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #159 LOAD_FAST', 0) -debug_merge_point(' #162 BINARY_MULTIPLY', 0) -i99 = int_mul_ovf(i98, i88) -guard_no_overflow(, descr=) [p0, p1, p11, i99, p4, p5, p13, p6, p7, p8, p9, p10, p12, i98, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #163 LOAD_FAST', 0) -debug_merge_point(' #166 BINARY_ADD', 0) -i100 = int_add_ovf(i99, 1) -guard_no_overflow(, descr=) [p0, p1, i100, p4, p5, p13, p6, p7, p8, p9, p10, p11, p12, i99, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #167 BINARY_SUBSCR', 0) -i102 = int_lt(i100, 0) -guard_false(i102, descr=) [p0, p1, p13, i100, i91, p4, p5, p6, p7, p8, p9, p10, p11, p12, None, 
None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -i103 = int_lt(i100, i91) -guard_true(i103, descr=) [p0, p1, p13, i100, p4, p5, p6, p7, p8, p9, p10, p11, p12, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -f104 = getarrayitem_raw(i95, i100, descr=) -debug_merge_point(' #168 LOAD_FAST', 0) -debug_merge_point(' #171 LOAD_FAST', 0) -debug_merge_point(' #174 LOAD_CONST', 0) -debug_merge_point(' #177 BINARY_ADD', 0) -i106 = int_add_ovf(i32, 1) -guard_no_overflow(, descr=) [p0, p1, i106, p4, p5, p13, p6, p7, p8, p9, p10, p11, p12, f104, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #178 LOAD_FAST', 0) -debug_merge_point(' #181 BINARY_MULTIPLY', 0) -i107 = int_mul_ovf(i106, i88) -guard_no_overflow(, descr=) [p0, p1, p11, i107, p4, p5, p13, p6, p7, p8, p9, p10, p12, i106, f104, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #182 LOAD_FAST', 0) -debug_merge_point(' #185 BINARY_ADD', 0) -i108 = int_add_ovf(i107, 1) -guard_no_overflow(, descr=) [p0, p1, i108, p4, p5, p13, p6, p7, p8, p9, p10, p11, p12, i107, None, f104, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #186 BINARY_SUBSCR', 0) -i110 = int_lt(i108, 0) -guard_false(i110, descr=) [p0, p1, p13, i108, i91, p4, p5, p6, p7, p8, p9, p10, p11, p12, None, None, f104, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -i111 = int_lt(i108, i91) -guard_true(i111, descr=) [p0, p1, p13, i108, p4, p5, p6, p7, p8, p9, p10, p11, p12, None, None, f104, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -f112 = getarrayitem_raw(i95, i108, descr=) -debug_merge_point(' #187 BINARY_ADD', 0) -f113 = float_add(f104, f112) -debug_merge_point(' #188 LOAD_FAST', 0) -guard_nonnull_class(p9, 19800744, descr=) [p0, p1, p9, p4, p5, p6, p7, p8, p10, p11, p12, p13, f113, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #191 BINARY_MULTIPLY', 0) -f115 = getfield_gc_pure(p9, descr=) -f116 = float_mul(f113, f115) -debug_merge_point(' #192 LOAD_FAST', 0) -debug_merge_point(' #195 LOAD_FAST', 0) -debug_merge_point(' #198 LOAD_FAST', 0) -debug_merge_point(' #201 BINARY_MULTIPLY', 0) -debug_merge_point(' #202 LOAD_FAST', 0) -debug_merge_point(' #205 BINARY_ADD', 0) -debug_merge_point(' #206 LOAD_CONST', 0) -debug_merge_point(' #209 BINARY_SUBTRACT', 0) -debug_merge_point(' #210 BINARY_SUBSCR', 0) -i118 = int_lt(i89, 0) -guard_false(i118, descr=) [p0, p1, p13, i89, i91, p4, p5, p6, p7, p8, p9, p10, p11, p12, f116, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -i119 = int_lt(i89, i91) -guard_true(i119, descr=) [p0, p1, p13, i89, p4, p5, p6, p7, p8, p9, p10, p11, p12, f116, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -f120 = getarrayitem_raw(i95, i89, descr=) -debug_merge_point(' #211 LOAD_FAST', 0) -debug_merge_point(' #214 LOAD_FAST', 0) -debug_merge_point(' #217 LOAD_FAST', 0) -debug_merge_point(' #220 BINARY_MULTIPLY', 0) -debug_merge_point(' #221 LOAD_FAST', 0) -debug_merge_point(' #224 BINARY_ADD', 0) -debug_merge_point(' #225 LOAD_CONST', 0) -debug_merge_point(' #228 BINARY_ADD', 0) -i122 = int_add(i90, 1) -debug_merge_point(' #229 BINARY_SUBSCR', 0) -i123 = int_lt(i122, i91) 
-guard_true(i123, descr=) [p0, p1, p13, i122, p4, p5, p6, p7, p8, p9, p10, p11, p12, f120, f116, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -f124 = getarrayitem_raw(i95, i122, descr=) -debug_merge_point(' #230 BINARY_ADD', 0) -f125 = float_add(f120, f124) -debug_merge_point(' #231 LOAD_FAST', 0) -guard_nonnull_class(p8, 19800744, descr=) [p0, p1, p8, p4, p5, p6, p7, p9, p10, p11, p12, p13, f125, None, f116, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #234 BINARY_MULTIPLY', 0) -f127 = getfield_gc_pure(p8, descr=) -f128 = float_mul(f125, f127) -debug_merge_point(' #235 BINARY_ADD', 0) -f129 = float_add(f116, f128) -debug_merge_point(' #236 LOAD_FAST', 0) -guard_nonnull_class(p10, 19800744, descr=) [p0, p1, p10, p4, p5, p6, p7, p8, p9, p11, p12, p13, f129, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #239 BINARY_MULTIPLY', 0) -f131 = getfield_gc_pure(p10, descr=) -f132 = float_mul(f129, f131) -debug_merge_point(' #240 LOAD_FAST', 0) -debug_merge_point(' #243 LOAD_FAST', 0) -debug_merge_point(' #246 LOAD_FAST', 0) -debug_merge_point(' #249 BINARY_MULTIPLY', 0) -debug_merge_point(' #250 LOAD_FAST', 0) -debug_merge_point(' #253 BINARY_ADD', 0) -debug_merge_point(' #254 STORE_SUBSCR', 0) -setarrayitem_raw(i95, i90, f132, descr=) -debug_merge_point(' #255 LOAD_FAST', 0) -debug_merge_point(' #258 LOAD_GLOBAL', 0) -p134 = getfield_gc(ConstPtr(ptr133), descr=) -guard_nonnull_class(p134, ConstClass(Function), descr=) [p0, p1, p134, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, None, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #261 LOAD_FAST', 0) -debug_merge_point(' #264 LOAD_FAST', 0) -debug_merge_point(' #267 LOAD_FAST', 0) -debug_merge_point(' #270 BINARY_MULTIPLY', 0) -debug_merge_point(' #271 LOAD_FAST', 0) -debug_merge_point(' #274 BINARY_ADD', 0) -debug_merge_point(' #275 BINARY_SUBSCR', 0) -f136 = getarrayitem_raw(i95, i90, descr=) -debug_merge_point(' #276 LOAD_FAST', 0) -debug_merge_point(' #279 BINARY_SUBTRACT', 0) -f137 = float_sub(f136, f96) -debug_merge_point(' #280 CALL_FUNCTION', 0) -p138 = getfield_gc(p134, descr=) -guard_value(p138, ConstPtr(ptr139), descr=) [p0, p1, p138, p134, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f137, None, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -p140 = getfield_gc(p134, descr=) -p141 = getfield_gc(p134, descr=) -p143 = call(ConstClass(getexecutioncontext), descr=) -p144 = getfield_gc(p143, descr=) -i145 = force_token() -p146 = getfield_gc(p143, descr=) -guard_isnull(p146, descr=) [p0, p1, p143, p146, p4, p5, p134, p6, p7, p8, p9, p10, p11, p12, p13, p144, i145, p140, f137, None, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -i147 = getfield_gc(p143, descr=) -i148 = int_is_zero(i147) -guard_true(i148, descr=) [p0, p1, p143, p4, p5, p134, p6, p7, p8, p9, p10, p11, p12, p13, p144, i145, p140, f137, None, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #0 LOAD_FAST', 1) -debug_merge_point(' #3 LOAD_FAST', 1) -debug_merge_point(' #6 BINARY_MULTIPLY', 1) -f149 = 
float_mul(f137, f137) -debug_merge_point(' #7 RETURN_VALUE', 1) -i150 = int_is_true(i147) -guard_false(i150, descr=) [p0, p1, p143, p4, p5, p134, p6, p7, p8, p9, p10, p11, p12, p13, f149, p144, i145, p140, f137, None, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, f17] -debug_merge_point(' #283 INPLACE_ADD', 0) -f151 = float_add(f17, f149) -debug_merge_point(' #284 STORE_FAST', 0) -debug_merge_point(' #287 JUMP_ABSOLUTE', 0) -i153 = getfield_raw(38968960, descr=) -i155 = int_sub(i153, 35) -setfield_raw(38968960, i155, descr=) -i157 = int_lt(i155, 0) -guard_false(i157, descr=) [p0, p1, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f151, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, f96, None, i78, i85, None, i68, None, None, i32, p18, None, None, None] -debug_merge_point(' #125 FOR_ITER', 0) -p159 = new_with_vtable(19865144) -setfield_gc(p159, 291, descr=) -setfield_gc(p159, 1, descr=) -setfield_gc(p159, p18, descr=) -p163 = new_with_vtable(19861240) -setfield_gc(p163, i85, descr=) -setfield_gc(p163, i78, descr=) -setfield_gc(p163, i68, descr=) -p165 = new_with_vtable(19800744) -setfield_gc(p165, f151, descr=) -p167 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p167, i32, descr=) -p169 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p169, 1, descr=) -p171 = new_with_vtable(19800744) -setfield_gc(p171, f96, descr=) -jump(p1, p0, ConstPtr(ptr172), p159, 2, p4, 0, 125, p5, p163, ConstPtr(ptr176), ConstPtr(ptr177), ConstPtr(ptr178), ConstPtr(ptr179), ConstPtr(ptr180), p6, p7, p8, p9, p10, p165, p11, p12, p13, p167, p169, p171, descr=) -[5ed623fc609b] jit-log-opt-bridge} -[5ed63ea5fa94] {jit-log-opt-bridge -# bridge out of Guard 110 with 23 ops -[p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, f12, f13, i14, i15, i16, i17, p18] -i19 = force_token() -setfield_gc(p1, i19, descr=) -call_may_force(ConstClass(action_dispatcher), p0, p1, descr=) -guard_not_forced(, descr=) [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, i14, i17, i16, i15, f12, f13, p18] -guard_no_exception(, descr=) [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, i14, i17, i16, i15, f12, f13, p18] -debug_merge_point(' #125 FOR_ITER', 0) -p22 = new_with_vtable(19865144) -setfield_gc(p22, 291, descr=) -setfield_gc(p22, p18, descr=) -setfield_gc(p22, 1, descr=) -p26 = new_with_vtable(19861240) -setfield_gc(p26, i15, descr=) -setfield_gc(p26, i14, descr=) -setfield_gc(p26, i16, descr=) -p28 = new_with_vtable(19800744) -setfield_gc(p28, f12, descr=) -p30 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p30, i17, descr=) -p32 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p32, 1, descr=) -p35 = new_with_vtable(19800744) -setfield_gc(p35, f13, descr=) -jump(p1, p0, ConstPtr(ptr36), p22, 2, p2, 0, 125, p3, p26, ConstPtr(ptr40), ConstPtr(ptr41), ConstPtr(ptr42), ConstPtr(ptr43), ConstPtr(ptr44), p4, p5, p6, p7, p8, p28, p9, p10, p11, p30, p32, p35, descr=) -[5ed63ea8ea04] jit-log-opt-bridge} -[5ed640a0a34c] {jit-log-opt-bridge -# bridge out of Guard 58 with 13 ops -[p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, f15, f16, i17] -i18 = force_token() -setfield_gc(p1, i18, descr=) -call_may_force(ConstClass(action_dispatcher), p0, p1, descr=) -guard_not_forced(, descr=) [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, f15, f16, i17] -guard_no_exception(, descr=) [p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, f15, f16, i17] 
-debug_merge_point(' #125 FOR_ITER', 0) -p21 = new_with_vtable(19800744) -setfield_gc(p21, f15, descr=) -p23 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p23, i17, descr=) -p25 = new_with_vtable(19800744) -setfield_gc(p25, f16, descr=) -jump(p1, p0, ConstPtr(ptr26), p2, 2, p3, 0, 125, p4, p5, ConstPtr(ptr30), ConstPtr(ptr31), ConstPtr(ptr32), ConstPtr(ptr33), ConstPtr(ptr34), p6, p7, p8, p9, p10, p21, p11, p12, p13, p14, p23, p25, descr=) -[5ed640a1e8c2] jit-log-opt-bridge} -[5ed6431fc824] {jit-log-opt-bridge -# bridge out of Guard 24 with 264 ops -[p0, p1, p2, p3, p4, p5, i6, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -guard_value(i6, 0, descr=) [i6, p0, p1, p3, p4, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -debug_merge_point(' #290 POP_BLOCK', 0) -p26 = getfield_gc(p4, descr=) -guard_class(p4, 19865144, descr=) [p0, p1, p4, p3, p26, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -i28 = getfield_gc(p4, descr=) -guard_value(i28, 1, descr=) [p0, p1, i28, p3, p26, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -debug_merge_point(' #291 JUMP_ABSOLUTE', 0) -i31 = getfield_raw(38968960, descr=) -i33 = int_sub(i31, 1) -setfield_raw(38968960, i33, descr=) -i35 = int_lt(i33, 0) -guard_false(i35, descr=) [p0, p1, p3, p26, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -guard_value(p3, ConstPtr(ptr36), descr=) [p0, p1, p3, p26, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -debug_merge_point(' #99 FOR_ITER', 0) -guard_class(p7, 19861240, descr=) [p0, p1, p7, p26, p5, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -i38 = getfield_gc(p7, descr=) -i40 = int_gt(i38, 0) -guard_true(i40, descr=) [p0, p1, p7, p26, p5, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p22, p23, p24] -i41 = getfield_gc(p7, descr=) -i42 = getfield_gc(p7, descr=) -i43 = int_add(i41, i42) -i45 = int_sub(i38, 1) -debug_merge_point(' #102 STORE_FAST', 0) -debug_merge_point(' #105 SETUP_LOOP', 0) -debug_merge_point(' #108 LOAD_GLOBAL', 0) -p46 = getfield_gc(p1, descr=) -setfield_gc(p7, i43, descr=) -setfield_gc(p7, i45, descr=) -guard_value(p46, ConstPtr(ptr47), descr=) [p0, p1, p46, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p26, i41] -p48 = getfield_gc(p46, descr=) -guard_isnull(p48, descr=) [p0, p1, p48, p46, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p26, i41] -p50 = getfield_gc(ConstPtr(ptr49), descr=) -guard_isnull(p50, descr=) [p0, p1, p50, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p26, i41] -p52 = getfield_gc(ConstPtr(ptr51), descr=) -guard_value(p52, ConstPtr(ptr53), descr=) [p0, p1, p52, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p26, i41] -p54 = getfield_gc(p52, descr=) -guard_isnull(p54, descr=) [p0, p1, p54, p52, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p26, i41] -p56 = getfield_gc(ConstPtr(ptr55), descr=) -guard_value(p56, ConstPtr(ptr57), descr=) [p0, p1, p56, p5, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p26, i41] -debug_merge_point(' #111 LOAD_CONST', 0) -debug_merge_point(' #114 LOAD_FAST', 0) -guard_nonnull_class(p20, 
ConstClass(W_IntObject), descr=) [p0, p1, p20, p5, p7, p56, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p21, p23, p24, p26, i41] -debug_merge_point(' #117 LOAD_CONST', 0) -debug_merge_point(' #120 BINARY_SUBTRACT', 0) -i59 = getfield_gc_pure(p20, descr=) -i61 = int_sub_ovf(i59, 1) -guard_no_overflow(, descr=) [p0, p1, p20, i61, p5, p7, p56, p11, p12, p13, p14, p15, p16, p17, p18, p19, p21, p23, p24, p26, i41] -debug_merge_point(' #121 CALL_FUNCTION', 0) -p63 = getfield_gc(ConstPtr(ptr62), descr=) -p64 = getfield_gc(ConstPtr(ptr62), descr=) -i65 = getfield_gc_pure(p64, descr=) -guard_false(i65, descr=) [p0, p1, p63, p64, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, i61, p26, i41] -p66 = getfield_gc_pure(p64, descr=) -i67 = arraylen_gc(p66, descr=) -i69 = int_sub(4, i67) -i71 = int_ge(3, i69) -guard_true(i71, descr=) [p0, p1, p63, i69, p64, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, i61, p26, i41] -i72 = int_sub(3, i69) -i73 = getfield_gc_pure(p64, descr=) -guard_false(i73, descr=) [p0, p1, p63, i72, i69, p64, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, i61, p26, i41] -p74 = getfield_gc_pure(p64, descr=) -p75 = getarrayitem_gc(p74, i72, descr=) -guard_class(p75, ConstClass(W_IntObject), descr=) [p0, p1, p75, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, i61, p26, i41] -i77 = getfield_gc_pure(p75, descr=) -i78 = int_is_zero(i77) -guard_false(i78, descr=) [p0, p1, i77, i61, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p75, None, p26, i41] -i81 = int_lt(i77, 0) -guard_false(i81, descr=) [p0, p1, i77, i61, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p75, None, p26, i41] -i83 = int_lt(1, i61) -guard_true(i83, descr=) [p0, p1, i77, i61, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p75, None, p26, i41] -i84 = int_sub(i61, 1) -i86 = int_sub(i84, 1) -i87 = uint_floordiv(i86, i77) -i89 = int_add(i87, 1) -i91 = int_lt(i89, 0) -guard_false(i91, descr=) [p0, p1, i77, i89, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, p75, i61, p26, i41] -debug_merge_point(' #124 GET_ITER', 0) -debug_merge_point(' #125 FOR_ITER', 0) -i93 = int_gt(i89, 0) -guard_true(i93, descr=) [p0, p1, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p21, p23, p24, i89, i77, None, None, p26, i41] -i94 = int_add(1, i77) -debug_merge_point(' #128 STORE_FAST', 0) -debug_merge_point(' #131 LOAD_FAST', 0) -guard_nonnull_class(p21, 19886912, descr=) [p0, p1, p21, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p24, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #134 LOAD_FAST', 0) -debug_merge_point(' #137 LOAD_FAST', 0) -guard_nonnull_class(p19, ConstClass(W_IntObject), descr=) [p0, p1, p19, p5, p7, p21, p11, p12, p13, p14, p15, p16, p17, p18, p20, p24, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #140 BINARY_MULTIPLY', 0) -i97 = getfield_gc_pure(p19, descr=) -i98 = int_mul_ovf(i41, i97) -guard_no_overflow(, descr=) [p0, p1, p19, i98, p5, p7, p21, p11, p12, p13, p14, p15, p16, p17, p18, p20, p24, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #141 LOAD_FAST', 0) -debug_merge_point(' #144 BINARY_ADD', 0) -i99 = int_add_ovf(i98, 1) -guard_no_overflow(, descr=) [p0, p1, i99, p5, p7, p21, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p24, i98, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #145 BINARY_SUBSCR', 0) -i100 = 
getfield_gc(p21, descr=) -i102 = int_lt(i99, 0) -guard_false(i102, descr=) [p0, p1, p21, i99, i100, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p24, None, i87, i94, None, i77, None, None, p26, i41] -i103 = int_lt(i99, i100) -guard_true(i103, descr=) [p0, p1, p21, i99, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, p24, None, i87, i94, None, i77, None, None, p26, i41] -i104 = getfield_gc(p21, descr=) -f105 = getarrayitem_raw(i104, i99, descr=) -debug_merge_point(' #146 STORE_FAST', 0) -debug_merge_point(' #149 LOAD_FAST', 0) -debug_merge_point(' #152 LOAD_FAST', 0) -debug_merge_point(' #155 LOAD_CONST', 0) -debug_merge_point(' #158 BINARY_SUBTRACT', 0) -i107 = int_sub_ovf(i41, 1) -guard_no_overflow(, descr=) [p0, p1, i107, p5, p7, p21, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #159 LOAD_FAST', 0) -debug_merge_point(' #162 BINARY_MULTIPLY', 0) -i108 = int_mul_ovf(i107, i97) -guard_no_overflow(, descr=) [p0, p1, p19, i108, p5, p7, p21, p11, p12, p13, p14, p15, p16, p17, p18, p20, i107, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #163 LOAD_FAST', 0) -debug_merge_point(' #166 BINARY_ADD', 0) -i109 = int_add_ovf(i108, 1) -guard_no_overflow(, descr=) [p0, p1, i109, p5, p7, p21, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, i108, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #167 BINARY_SUBSCR', 0) -i111 = int_lt(i109, 0) -guard_false(i111, descr=) [p0, p1, p21, i109, i100, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -i112 = int_lt(i109, i100) -guard_true(i112, descr=) [p0, p1, p21, i109, p5, p7, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -f113 = getarrayitem_raw(i104, i109, descr=) -debug_merge_point(' #168 LOAD_FAST', 0) -debug_merge_point(' #171 LOAD_FAST', 0) -debug_merge_point(' #174 LOAD_CONST', 0) -debug_merge_point(' #177 BINARY_ADD', 0) -i115 = int_add_ovf(i41, 1) -guard_no_overflow(, descr=) [p0, p1, i115, p5, p7, p21, p12, p13, p14, p15, p16, p17, p18, p19, p20, f113, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #178 LOAD_FAST', 0) -debug_merge_point(' #181 BINARY_MULTIPLY', 0) -i116 = int_mul_ovf(i115, i97) -guard_no_overflow(, descr=) [p0, p1, p19, i116, p5, p7, p21, p12, p13, p14, p15, p16, p17, p18, p20, i115, f113, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #182 LOAD_FAST', 0) -debug_merge_point(' #185 BINARY_ADD', 0) -i117 = int_add_ovf(i116, 1) -guard_no_overflow(, descr=) [p0, p1, i117, p5, p7, p21, p12, p13, p14, p15, p16, p17, p18, p19, p20, i116, None, f113, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #186 BINARY_SUBSCR', 0) -i119 = int_lt(i117, 0) -guard_false(i119, descr=) [p0, p1, p21, i117, i100, p5, p7, p12, p13, p14, p15, p16, p17, p18, p19, p20, None, None, f113, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -i120 = int_lt(i117, i100) -guard_true(i120, descr=) [p0, p1, p21, i117, p5, p7, p12, p13, p14, p15, p16, p17, p18, p19, p20, None, None, f113, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -f121 = getarrayitem_raw(i104, i117, descr=) -debug_merge_point(' #187 BINARY_ADD', 0) -f122 = float_add(f113, f121) -debug_merge_point(' #188 LOAD_FAST', 0) 
-guard_nonnull_class(p16, 19800744, descr=) [p0, p1, p16, p5, p7, p12, p13, p14, p15, p17, p18, p19, p20, p21, f122, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #191 BINARY_MULTIPLY', 0) -f124 = getfield_gc_pure(p16, descr=) -f125 = float_mul(f122, f124) -debug_merge_point(' #192 LOAD_FAST', 0) -debug_merge_point(' #195 LOAD_FAST', 0) -debug_merge_point(' #198 LOAD_FAST', 0) -debug_merge_point(' #201 BINARY_MULTIPLY', 0) -debug_merge_point(' #202 LOAD_FAST', 0) -debug_merge_point(' #205 BINARY_ADD', 0) -debug_merge_point(' #206 LOAD_CONST', 0) -debug_merge_point(' #209 BINARY_SUBTRACT', 0) -debug_merge_point(' #210 BINARY_SUBSCR', 0) -i127 = int_lt(i98, 0) -guard_false(i127, descr=) [p0, p1, p21, i98, i100, p5, p7, p12, p13, p14, p15, p16, p17, p18, p19, p20, f125, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -i128 = int_lt(i98, i100) -guard_true(i128, descr=) [p0, p1, p21, i98, p5, p7, p12, p13, p14, p15, p16, p17, p18, p19, p20, f125, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -f129 = getarrayitem_raw(i104, i98, descr=) -debug_merge_point(' #211 LOAD_FAST', 0) -debug_merge_point(' #214 LOAD_FAST', 0) -debug_merge_point(' #217 LOAD_FAST', 0) -debug_merge_point(' #220 BINARY_MULTIPLY', 0) -debug_merge_point(' #221 LOAD_FAST', 0) -debug_merge_point(' #224 BINARY_ADD', 0) -debug_merge_point(' #225 LOAD_CONST', 0) -debug_merge_point(' #228 BINARY_ADD', 0) -i131 = int_add(i99, 1) -debug_merge_point(' #229 BINARY_SUBSCR', 0) -i132 = int_lt(i131, i100) -guard_true(i132, descr=) [p0, p1, p21, i131, p5, p7, p13, p14, p15, p16, p17, p18, p19, p20, f129, f125, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -f133 = getarrayitem_raw(i104, i131, descr=) -debug_merge_point(' #230 BINARY_ADD', 0) -f134 = float_add(f129, f133) -debug_merge_point(' #231 LOAD_FAST', 0) -guard_nonnull_class(p15, 19800744, descr=) [p0, p1, p15, p5, p7, p13, p14, p16, p17, p18, p19, p20, p21, f134, None, f125, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #234 BINARY_MULTIPLY', 0) -f136 = getfield_gc_pure(p15, descr=) -f137 = float_mul(f134, f136) -debug_merge_point(' #235 BINARY_ADD', 0) -f138 = float_add(f125, f137) -debug_merge_point(' #236 LOAD_FAST', 0) -guard_nonnull_class(p17, 19800744, descr=) [p0, p1, p17, p5, p7, p13, p14, p15, p16, p18, p19, p20, p21, f138, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #239 BINARY_MULTIPLY', 0) -f140 = getfield_gc_pure(p17, descr=) -f141 = float_mul(f138, f140) -debug_merge_point(' #240 LOAD_FAST', 0) -debug_merge_point(' #243 LOAD_FAST', 0) -debug_merge_point(' #246 LOAD_FAST', 0) -debug_merge_point(' #249 BINARY_MULTIPLY', 0) -debug_merge_point(' #250 LOAD_FAST', 0) -debug_merge_point(' #253 BINARY_ADD', 0) -debug_merge_point(' #254 STORE_SUBSCR', 0) -setarrayitem_raw(i104, i99, f141, descr=) -debug_merge_point(' #255 LOAD_FAST', 0) -guard_nonnull_class(p18, 19800744, descr=) [p0, p1, p18, p5, p7, p13, p14, p15, p16, p17, p19, p20, p21, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #258 LOAD_GLOBAL', 0) -p144 = getfield_gc(ConstPtr(ptr143), descr=) -guard_nonnull_class(p144, ConstClass(Function), descr=) [p0, p1, p144, p5, p7, p18, p13, p14, p15, 
p16, p17, p19, p20, p21, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #261 LOAD_FAST', 0) -debug_merge_point(' #264 LOAD_FAST', 0) -debug_merge_point(' #267 LOAD_FAST', 0) -debug_merge_point(' #270 BINARY_MULTIPLY', 0) -debug_merge_point(' #271 LOAD_FAST', 0) -debug_merge_point(' #274 BINARY_ADD', 0) -debug_merge_point(' #275 BINARY_SUBSCR', 0) -f146 = getarrayitem_raw(i104, i99, descr=) -debug_merge_point(' #276 LOAD_FAST', 0) -debug_merge_point(' #279 BINARY_SUBTRACT', 0) -f147 = float_sub(f146, f105) -debug_merge_point(' #280 CALL_FUNCTION', 0) -p148 = getfield_gc(p144, descr=) -guard_value(p148, ConstPtr(ptr149), descr=) [p0, p1, p148, p144, p5, p7, p18, p13, p14, p15, p16, p17, p19, p20, p21, f147, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -p150 = getfield_gc(p144, descr=) -p151 = getfield_gc(p144, descr=) -p153 = call(ConstClass(getexecutioncontext), descr=) -p154 = getfield_gc(p153, descr=) -i155 = force_token() -p156 = getfield_gc(p153, descr=) -guard_isnull(p156, descr=) [p0, p1, p153, p156, p5, p7, p18, p144, p13, p14, p15, p16, p17, p19, p20, p21, p150, p154, i155, f147, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -i157 = getfield_gc(p153, descr=) -i158 = int_is_zero(i157) -guard_true(i158, descr=) [p0, p1, p153, p5, p7, p18, p144, p13, p14, p15, p16, p17, p19, p20, p21, p150, p154, i155, f147, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #0 LOAD_FAST', 1) -debug_merge_point(' #3 LOAD_FAST', 1) -debug_merge_point(' #6 BINARY_MULTIPLY', 1) -f159 = float_mul(f147, f147) -debug_merge_point(' #7 RETURN_VALUE', 1) -i160 = int_is_true(i157) -guard_false(i160, descr=) [p0, p1, p153, p5, p7, p18, p144, p13, p14, p15, p16, p17, p19, p20, p21, f159, p150, p154, i155, f147, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #283 INPLACE_ADD', 0) -f161 = getfield_gc_pure(p18, descr=) -f162 = float_add(f161, f159) -debug_merge_point(' #284 STORE_FAST', 0) -debug_merge_point(' #287 JUMP_ABSOLUTE', 0) -i164 = getfield_raw(38968960, descr=) -i166 = int_sub(i164, 34) -setfield_raw(38968960, i166, descr=) -i168 = int_lt(i166, 0) -guard_false(i168, descr=) [p0, p1, p5, p7, p13, p14, p15, p16, p17, p19, p20, p21, f162, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, f105, None, i87, i94, None, i77, None, None, p26, i41] -debug_merge_point(' #125 FOR_ITER', 0) -p170 = new_with_vtable(19865144) -setfield_gc(p170, 291, descr=) -setfield_gc(p170, 1, descr=) -setfield_gc(p170, p26, descr=) -p174 = new_with_vtable(19861240) -setfield_gc(p174, i94, descr=) -setfield_gc(p174, i87, descr=) -setfield_gc(p174, i77, descr=) -p176 = new_with_vtable(19800744) -setfield_gc(p176, f162, descr=) -p178 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p178, i41, descr=) -p180 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p180, 1, descr=) -p182 = new_with_vtable(19800744) -setfield_gc(p182, f105, descr=) -jump(p1, p0, ConstPtr(ptr183), p170, 2, p5, 0, 125, p7, p174, ConstPtr(ptr187), ConstPtr(ptr188), ConstPtr(ptr189), ConstPtr(ptr190), ConstPtr(ptr191), p13, p14, p15, p16, p17, p176, p19, p20, p21, p178, p180, p182, descr=) -[5ed6432f4a2c] 
jit-log-opt-bridge} -[5ed66199330c] {jit-log-opt-bridge -# bridge out of Guard 65 with 72 ops -[p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, i14, f15, f16] -debug_merge_point(' #294 POP_BLOCK', 0) -p17 = getfield_gc(p3, descr=) -guard_class(p3, 19865144, descr=) [p0, p1, p3, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -i19 = getfield_gc(p3, descr=) -guard_value(i19, 0, descr=) [p0, p1, i19, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -debug_merge_point(' #295 LOAD_GLOBAL', 0) -p21 = getfield_gc(p1, descr=) -guard_value(p21, ConstPtr(ptr22), descr=) [p0, p1, p21, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -p23 = getfield_gc(p21, descr=) -guard_isnull(p23, descr=) [p0, p1, p23, p21, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -p25 = getfield_gc(ConstPtr(ptr24), descr=) -guard_nonnull_class(p25, 19905496, descr=) [p0, p1, p25, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -debug_merge_point(' #298 LOOKUP_METHOD', 0) -p27 = getfield_gc(p25, descr=) -guard_value(p27, ConstPtr(ptr28), descr=) [p0, p1, p25, p27, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -p29 = getfield_gc(p27, descr=) -guard_isnull(p29, descr=) [p0, p1, p25, p29, p27, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -p31 = getfield_gc(ConstPtr(ptr30), descr=) -guard_value(p31, ConstPtr(ptr32), descr=) [p0, p1, p31, p25, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -debug_merge_point(' #301 LOAD_FAST', 0) -debug_merge_point(' #304 CALL_METHOD', 0) -call(ConstClass(set_errno), 0, descr=) -f36 = call(ConstClass(sqrt), f16, descr=) -i38 = call(ConstClass(get_errno), descr=) -i39 = float_ne(f36, f36) -guard_false(i39, descr=) [p0, p1, i38, f36, f16, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, None, i14, f15] -i42 = float_eq(f36, inf) -i44 = float_eq(f36, -inf) -i45 = int_or(i42, i44) -i46 = int_is_true(i45) -guard_false(i46, descr=) [p0, p1, i38, f36, f16, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, None, i14, f15] -i47 = int_is_true(i38) -guard_false(i47, descr=) [p0, p1, i38, f36, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f16, i14, f15] -debug_merge_point(' #307 RETURN_VALUE', 0) -guard_isnull(p17, descr=) [p0, p1, p17, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, f36, f16, i14, f15] -p48 = getfield_gc(p1, descr=) -setarrayitem_gc(p48, 0, ConstPtr(ptr50), descr=) -setarrayitem_gc(p48, 1, ConstPtr(ptr52), descr=) -setarrayitem_gc(p48, 2, ConstPtr(ptr52), descr=) -setarrayitem_gc(p48, 3, ConstPtr(ptr55), descr=) -setarrayitem_gc(p48, 4, ConstPtr(ptr55), descr=) -setarrayitem_gc(p48, 5, ConstPtr(ptr55), descr=) -setarrayitem_gc(p48, 6, ConstPtr(ptr55), descr=) -setarrayitem_gc(p48, 7, p5, descr=) -p60 = getfield_gc(p1, descr=) -setarrayitem_gc(p60, 0, p6, descr=) -setarrayitem_gc(p60, 1, p7, descr=) -setarrayitem_gc(p60, 2, p8, descr=) -setarrayitem_gc(p60, 3, p9, descr=) -p66 = new_with_vtable(19800744) -setfield_gc(p66, f16, descr=) -setarrayitem_gc(p60, 4, p66, descr=) -setarrayitem_gc(p60, 5, p10, descr=) -setarrayitem_gc(p60, 6, p11, descr=) -setarrayitem_gc(p60, 7, p12, descr=) -setarrayitem_gc(p60, 8, p13, descr=) -p73 = new_with_vtable(ConstClass(W_IntObject)) -setfield_gc(p73, i14, descr=) -setarrayitem_gc(p60, 9, p73, descr=) -p76 = new_with_vtable(19800744) -setfield_gc(p76, f15, descr=) diff --git a/tests/examplefiles/test.r3 b/tests/examplefiles/test.r3 deleted file mode 100644 index 707102db..00000000 --- a/tests/examplefiles/test.r3 +++ 
/dev/null @@ -1,114 +0,0 @@ -preface.... everything what is before header is not evaluated -so this should not be colorized: -1 + 2 - -REBOL [] ;<- this is minimal header, everything behind it must be colorized - -;## String tests ## -print "Hello ^"World" ;<- with escaped char -multiline-string: { - bla bla "bla" {bla} -} -char-a: #"a" -escaped-a: #"^(61)" -new-line: #"^/" - -;## Binaries ## -print decompress 64#{eJzLSM3JyQcABiwCFQUAAAA=} -;2#{0000 00000} ;<- this one is invalid! -2#{} -#{FF00} - -;##Date + time ## -1-Feb-2009 -1-Feb-2009/2:24:46+1:0 -1:0 1:1:1 -0:1.1 - -;## Tuple ## -red: 255.0.0 -red-with-alpha: 255.0.0.100 - -;## url!, file! and email! ## -aaa@bbb.cz -http:// -dns:// -tcp://127.0.0.1 -%/c/rebol/ -%"c:\Program Files\" -%/c/Program%20Files/ -to-rebol-file "c:\Program Files\" -suffix? %bla.swf - -;## Money ## -$1 --$1.2 -USA$100 - -;## Tag! ## - - - -;## Pair! ## -10x200 - -;## Issue! ## -type? #ff0000 ;== issue! - -;## some numbers ## -to integer! (1 + (x / 4.5) * 1E-4) - -;## some spec comments -1 + 1 -comment "aa" -2 + 2 -comment {aa} -3 + 3 -comment {a^{} -4 + 4 -comment {{}} -5 + 5 -comment { - foo: 6 -} -6 + 6 -comment [foo: 6] -7 + 7 -comment [foo: "[" ] -8 + 8 -comment [foo: {^{} ] -9 + 9 -comment [foo: {boo} ] -10 + 10 -comment 5-May-2014/11:17:34+2:00 -5-May-2014/11:17:34+2:00 11 + 11 - -;## other tests ## ----: 1 -x/(1 + n)/y -b/:1 - -;## and... -REBOL [ - purpose: { - reads css file and creates html from it - so one can see how the styles looks like - } -] -style: %default -out: rejoin [{ - - - Pygments style: } style {.css - - - -
    -}]
    -css: read/lines join style %.css
    -foreach line css [
    -    parse line [".syntax ." copy c to " " thru "/*" copy t to "*/" to end (
    -        append out rejoin ["" t "^/"])
    -    ]
    -]
    -write join style %.html join out "
    " -halt diff --git a/tests/examplefiles/test.rb b/tests/examplefiles/test.rb deleted file mode 100644 index 8ac102e6..00000000 --- a/tests/examplefiles/test.rb +++ /dev/null @@ -1,177 +0,0 @@ -a.each{|el|anz[el]=anz[el]?anz[el]+1:1} -while x<10000 -#a bis f dienen dazu die Nachbarschaft festzulegen. Man stelle sich die #Zahl von 1 bis 64 im Binärcode vor 1 bedeutet an 0 aus - b=(p[x]%32)/16<1 ? 0 : 1 - - (x-102>=0? n[x-102].to_i : 0)*a+(x-101>=0?n[x-101].to_i : 0)*e+n[x-100].to_i+(x-99>=0? n[x-99].to_i : 0)*f+(x-98>=0? n[x-98].to_i : 0)*a+ - n[x+199].to_i*b+n[x+200].to_i*d+n[x+201].to_i*b - -#und die Ausgabe folgt -g=%w{} -x=0 - -#leere regex -test //, 123 - -while x<100 - puts"#{g[x]}" - x+=1 -end - -puts"" -sleep(10) - -1E1E1 -puts 30.send(:/, 5) # prints 6 - -# fun with class attributes -class Foo - def self.blub x - if not x.nil? - self.new - end - end - def another_way_to_get_class - self.class - end -end - -# ruby 1.9 "call operator" -a = Proc.new { 42 } -a.() - -"instance variables can be #@included, #@@class_variables\n and #$globals as well." -`instance variables can be #@included, #@@class_variables\n and #$globals as well.` -'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -/instance variables can be #@included, #@@class_variables\n and #$globals as well./mousenix -:"instance variables can be #@included, #@@class_variables\n and #$globals as well." -:'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%q'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%Q'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%w'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%W'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%s'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%r'instance variables can be #@included, #@@class_variables\n and #$globals as well.' -%x'instance variables can be #@included, #@@class_variables\n and #$globals as well.' - -#%W[ but #@0illegal_values look strange.] - -%s#ruby allows strange#{constructs} -%s#ruby allows strange#$constructs -%s#ruby allows strange#@@constructs - -################################################################## -# HEREDOCS -foo(<<-A, <<-B) -this is the text of a -A -and this is the text of b -B - -a = <<"EOF" -This is a multiline #$here document -terminated by EOF on a line by itself -EOF - -a = <<'EOF' -This is a multiline #$here document -terminated by EOF on a line by itself -EOF - -b=(p[x] %32)/16<1 ? 0 : 1 - -<<"" -#{test} -#@bla -#die suppe!!! -\xfffff - - -super <<-EOE % [ - foo -EOE - -< [1, 2, 3, 4, 5, 6] -p [1,2,3].`(:concat, [4,5,6]) # => [1, 2, 3, 4, 5, 6] -p "Hurra! ".`(:*, 3) # => "Hurra! Hurra! Hurra! " -p "Hurra! ".`('*', 3) # => "Hurra! Hurra! Hurra! " -# Leider geht nicht die Wunschform -# [1,2,3] `concat` [4,5,6] - -class Object - @@infixops = [] - alias :xeq :` - def addinfix(operator) - @@infixops << operator - end - def `(expression) - @@infixops.each{|op|break if expression.match(/^(.*?) (#{op}) (.*)$/)} - raise "unknown infix operator in expression: #{expression}" if $2 == nil - eval($1).method($2.to_sym).call(eval($3)) - end -end -addinfix("concat") -p `[1,2,3] concat [4,5,6]` # => [1, 2, 3, 4, 5, 6] - - -# HEREDOC FUN!!!!!!!1111 -foo(< - <% rows.each do |row| %> - -
    <%= item.title %> - <%= item.description %> - - <% end %> - - - -

    Pages

    - - - - - - - - - - -<% if @homepage -%> -<%= render_node @homepage -%> -<% else -%> - - - -<% end -%> - -
    PageStatusModify
    No Pages
    - -
    -

    -<% unless @homepage -%> - <%= link_to image_tag('new-homepage', :alt => 'New Homepage'), homepage_new_url %> -<% end -%> - <%= image_submit_tag 'clear-page-cache' %> -

    -
    diff --git a/tests/examplefiles/test.rsl b/tests/examplefiles/test.rsl deleted file mode 100644 index d6c9fc9a..00000000 --- a/tests/examplefiles/test.rsl +++ /dev/null @@ -1,111 +0,0 @@ -scheme COMPILER = -class - type - Prog == mk_Prog(stmt : Stmt), - - Stmt == - mk_Asgn(ide : Identifier, expr : Expr) | - mk_If(cond : Expr, s1 : Stmt, s2 : Stmt) | - mk_Seq(head : Stmt, last : Stmt), - - Expr == - mk_Const(const : Int) | - mk_Plus(fst : Expr, snd : Expr) | - mk_Id(ide : Identifier), - Identifier = Text - -type /* storage for program variables */ - `Sigma = Identifier -m-> Int - -value - m : Prog -> `Sigma -> `Sigma - m(p)(`sigma) is m(stmt(p))(`sigma), - - m : Stmt -> `Sigma -> `Sigma - m(s)(`sigma) is - case s of - mk_Asgn(i, e) -> `sigma !! [i +> m(e)(`sigma)], - mk_Seq(s1, s2) -> m(s2)(m(s1)(`sigma)), - mk_If(c, s1, s2) -> - if m(c)(`sigma) ~= 0 then m(s1)(`sigma) else m(s2)(`sigma) end - end, - - m : Expr -> `Sigma -> Int - m(e)(`sigma) is - case e of - mk_Const(n) -> n, - mk_Plus(e1, e2) -> m(e1)(`sigma) + m(e2)(`sigma), - mk_Id(id) -> if id isin dom `sigma then `sigma(id) else 0 end - end - -type - MProg = Inst-list, - Inst == - mk_Push(ide1 : Identifier) | - mk_Pop(Unit) | - mk_Add(Unit) | - mk_Cnst(val : Int) | - mk_Store(ide2 : Identifier) | - mk_Jumpfalse(off1 : Int) | - mk_Jump(off2 : Int) - - -/* An interpreter for SMALL instructions */ - -type Stack = Int-list -value - I : MProg >< Int >< Stack -> (`Sigma ->`Sigma) - I(mp, pc, s)(`sigma) is - if pc <= 0 \/ pc > len mp then `sigma else - case mp(pc) of - mk_Push(x) -> if x isin dom `sigma - then I(mp, pc + 1, <.`sigma(x).> ^ s)(`sigma) - else I(mp, pc + 1, <.0.> ^ s)(`sigma) end, - mk_Pop(()) -> if len s = 0 then `sigma - else I(mp, pc + 1, tl s)(`sigma) end, - mk_Cnst(n) -> I(mp, pc + 1, <.n.> ^ s)(`sigma), - mk_Add(()) -> if len s < 2 then `sigma - else I(mp, pc + 1,<.s(1) + s(2).> ^ tl tl s)(`sigma) end, - mk_Store(x) -> if len s = 0 then `sigma - else I(mp, pc + 1, s)(`sigma !! [x +> s(1)]) end, - mk_Jumpfalse(n) -> if len s = 0 then `sigma - elsif hd s ~= 0 then I(mp, pc + 1, s)(`sigma) - else I(mp, pc + n, s)(`sigma) end, - mk_Jump(n) -> I(mp, pc + n, s)(`sigma) - end - end - -value - comp_Prog : Prog -> MProg - comp_Prog(p) is comp_Stmt(stmt(p)), - - comp_Stmt : Stmt -> MProg - comp_Stmt(s) is - case s of - mk_Asgn(id, e) -> comp_Expr(e) ^ <. mk_Store(id), mk_Pop() .>, - mk_Seq(s1, s2) -> comp_Stmt(s1) ^ comp_Stmt(s2), - mk_If(e, s1, s2) -> - let - ce = comp_Expr(e), - cs1 = comp_Stmt(s1), cs2 = comp_Stmt(s2) - in - ce ^ - <. mk_Jumpfalse(len cs1 + 3) .> ^ - <. mk_Pop() .> ^ - cs1 ^ - <. mk_Jump(len cs2 + 2) .> ^ - <. mk_Pop() .> ^ - cs2 - end - end, - - comp_Expr : Expr -> MProg - comp_Expr(e) is - case e of - mk_Const(n) -> <. mk_Cnst(n) .>, - mk_Plus(e1, e2) -> - comp_Expr(e1) ^ comp_Expr(e2) ^ <. mk_Add() .>, - mk_Id(id) -> <. 
mk_Push(id) .> - end - -end diff --git a/tests/examplefiles/test.scaml b/tests/examplefiles/test.scaml deleted file mode 100644 index 8872a83d..00000000 --- a/tests/examplefiles/test.scaml +++ /dev/null @@ -1,8 +0,0 @@ --@ import val city:String = "Tampa" -- val name:String = "Hiram" -%html - %body - %p Hello #{name} from #{city} - %ul - - for ( i <- 1 to 10 ) - %li Item #{i} \ No newline at end of file diff --git a/tests/examplefiles/test.sco b/tests/examplefiles/test.sco deleted file mode 100644 index d997c1b3..00000000 --- a/tests/examplefiles/test.sco +++ /dev/null @@ -1,22 +0,0 @@ -/* - * comment - */ -; comment -// comment -a b C d e f i q s t v x y -z -np0 nP1 Np2 NP3 -m/**/label; -n label -123 0123456789 -0xabcdef0123456789 0XABCDEF -1e2 3e+4 5e-6 7E8 9E+0 1E-2 3. 4.56 .789 -"characters$MACRO." -{ 1 I - { 2 J - { 3 K - $I $J $K - } - } -} -#include "score.sco" diff --git a/tests/examplefiles/test.shen b/tests/examplefiles/test.shen deleted file mode 100644 index 7a254334..00000000 --- a/tests/examplefiles/test.shen +++ /dev/null @@ -1,137 +0,0 @@ -(package pygments-test [some symbols] - -\* multiline - comment -*\ - -\\ With vars as functions - -(define super - [Value Succ End] Action Combine Zero -> - (if (End Value) - Zero - (Combine (Action Value) - (super [(Succ Value) Succ End] - Action Combine Zero)))) - -(define for - Stream Action -> (super Stream Action (function do) 0)) - -(define filter - Stream Condition -> - (super Stream - (/. Val (if (Condition Val) [Val] [])) - (function append) - [])) - -(for [0 (+ 1) (= 10)] (function print)) - -(filter [0 (+ 1) (= 100)] - (/. X (integer? (/ X 3)))) - - -\\ Typed functions - -(define typed-map - { (A --> B) --> (list A) --> (list B) } - F X -> (typed-map-h F X [])) - -(define typed-map-h - { (A --> B) --> (list A) --> (list B) \\ comment - --> (list B) } - _ [] X -> (reverse X) - F [X | Y] Z -> (typed-map-h F Y [(F X) | Z])) - -(define append-string - { string --> string \* comment *\ --> string } - S1 S2 -> (cn S1 S2)) - -(let X 1 - Y 2 - (+ (type X number) (type Y number))) - -\\ Yacc - -(defcc - - := (package-macro (macroexpand ) ); - := [{ | ]; - := [} | ]; - := [bar! | ]; - := [; | ]; - := [:= | ]; - := [:- | ]; - := [: | ]; - := [(intern ",") | ]; - := [];) - -(defcc - 91 := skip;) - -\\ Pattern matching - -(define matches - 1 X 3 -> X - X Y Z -> Y where (and (= X 1) (= Z 3)) - true false _ -> true - (@p a X c) (@s X "abc") (@v 1 2 3 <>) -> true - [X | Rest] [] [a b c] -> true - [(@p a b)] [[[1] 2] X] "string" -> true - _ _ _ -> false) - - -\\ Prolog - -(defprolog th* - X A Hyps <-- (show [X : A] Hyps) (when false); - X A _ <-- (fwhen (typedf? X)) (bind F (sigf X)) (call [F A]); - (mode [F] -) A Hyp <-- (th* F [--> A] Hyp); - (mode [cons X Y] -) [list A] Hyp <-- (th* X A Hyp) (th* Y [list A] Hyp); - (mode [@s X Y] -) string Hyp <-- (th* X string Hyp) (th* Y string Hyp); - (mode [lambda X Y] -) [A --> B] Hyp <-- ! - (bind X&& (placeholder)) - (bind Z (ebr X&& X Y)) - (th* Z B [[X&& : A] | Hyp]); - (mode [type X A] -) B Hyp <-- ! (unify A B) (th* X A Hyp);) - -\\ Macros - -(defmacro log-macro - [log N] -> [log N 10]) - -\\ Sequent calculus - -(datatype rank - - if (element? X [ace 2 3 4 5 6 7 8 9 10 jack queen king]) - ________ - X : rank;) - -(datatype suit - - if (element? 
Suit [spades hearts diamonds clubs]) - _________ - Suit : suit;) - -(datatype card - - Rank : rank; Suit : suit; - _________________ - [Rank Suit] : card; - - Rank : rank, Suit : suit >> P; - _____________________ - [Rank Suit] : card >> P;) - -(datatype card - - Rank : rank; Suit : suit; - ================== - [Rank Suit] : card;) - -\\ String interpolation and escape sequences - -"abc~A ~S~R ~% blah - c#30;c#31;blah" - -) diff --git a/tests/examplefiles/test.sil b/tests/examplefiles/test.sil deleted file mode 100644 index 3bcee835..00000000 --- a/tests/examplefiles/test.sil +++ /dev/null @@ -1,206 +0,0 @@ -domain Option__Node { - unique function Option__Node__Some(): Option__Node - unique function Option__Node__None(): Option__Node - - function variantOfOptionNode(self: Ref): Option__Node - - function isOptionNode(self: Ref): Bool - - axiom ax_variantOfOptionNodeChoices { - forall x: Ref :: { variantOfOptionNode(x) } - (variantOfOptionNode(x) == Option__Node__Some() || variantOfOptionNode(x) == Option__Node__None()) - } - - axiom ax_isCounterState { - forall x: Ref :: { variantOfOptionNode(x) } - isOptionNode(x) == (variantOfOptionNode(x) == Option__Node__Some() || - variantOfOptionNode(x) == Option__Node__None()) - } -} - -predicate validOption(this: Ref) { - isOptionNode(this) && - variantOfOptionNode(this) == Option__Node__Some() ==> ( - acc(this.Option__Node__Some__1, write) && - acc(validNode(this.Option__Node__Some__1)) - ) -} - -field Option__Node__Some__1: Ref - -field Node__v: Int -field Node__next: Ref - -predicate validNode(this: Ref) { - acc(this.Node__v) && - acc(this.Node__next) && - acc(validOption(this.Node__next)) -} - - -function length(this: Ref): Int - requires acc(validNode(this), write) - ensures result >= 1 -{ - (unfolding acc(validNode(this), write) in - unfolding acc(validOption(this.Node__next)) in - (variantOfOptionNode(this.Node__next) == Option__Node__None()) ? - 1 : 1 + length(this.Node__next.Option__Node__Some__1) - ) -} - -function itemAt(this: Ref, i: Int): Int - requires acc(validNode(this), write) - requires 0 <= i && i < length(this) -{ - unfolding acc(validNode(this), write) in unfolding acc(validOption(this.Node__next)) in ( - (i == 0) ? - this.Node__v: - (variantOfOptionNode(this.Node__next) == Option__Node__Some()) ? - itemAt(this.Node__next.Option__Node__Some__1, i-1) : this.Node__v - ) -} - -function sum(this$1: Ref): Int - requires acc(validNode(this$1), write) -{ - (unfolding acc(validNode(this$1), write) in unfolding acc(validOption(this$1.Node__next)) in - (variantOfOptionNode(this$1.Node__next) == Option__Node__None()) ? 
this$1.Node__v : this$1.Node__v + sum(this$1.Node__next.Option__Node__Some__1)) -} - -method append(this: Ref, val: Int) - requires acc(validNode(this), write) - ensures acc(validNode(this), write) /* POST1 */ - ensures length(this) == (old(length(this)) + 1) /* POST2 */ - ensures (forall i: Int :: (0 <= i && i < old(length(this))) ==> (itemAt(this, i) == old(itemAt(this, i)))) /* POST3 */ - ensures itemAt(this, length(this) - 1) == val /* POST4 */ - ensures true ==> true -{ - var tmp_node: Ref - var tmp_option: Ref - - unfold acc(validNode(this), write) - unfold acc(validOption(this.Node__next), write) - - if (variantOfOptionNode(this.Node__next) == Option__Node__None()) { - tmp_node := new(Node__next, Node__v) - tmp_node.Node__next := null - tmp_node.Node__v := val - - assume variantOfOptionNode(tmp_node.Node__next) == Option__Node__None() - fold acc(validOption(tmp_node.Node__next)) - fold acc(validNode(tmp_node), write) - - tmp_option := new(Option__Node__Some__1) - tmp_option.Option__Node__Some__1 := tmp_node - assume variantOfOptionNode(tmp_option) == Option__Node__Some() - fold acc(validOption(tmp_option)) - - this.Node__next := tmp_option - - - unfold validOption(tmp_option) - assert length(tmp_node) == 1 /* TODO: Required by Silicon, POST2 fails otherwise */ - assert itemAt(tmp_node, 0) == val /* TODO: Required by Silicon, POST4 fails otherwise */ - fold validOption(tmp_option) - } else { - append(this.Node__next.Option__Node__Some__1, val) - fold acc(validOption(this.Node__next), write) - } - - fold acc(validNode(this), write) -} - -method prepend(tail: Ref, val: Int) returns (res: Ref) - requires acc(validNode(tail)) - ensures acc(validNode(res)) - //ensures acc(validNode(tail)) - ensures length(res) == old(length(tail)) + 1 - - ensures (forall i: Int :: (1 <= i && i < length(res)) ==> (itemAt(res, i) == old(itemAt(tail, i-1)))) /* POST3 */ - ensures itemAt(res, 0) == val -{ - var tmp_option: Ref - - res := new(Node__v, Node__next) - res.Node__v := val - - tmp_option := new(Option__Node__Some__1) - tmp_option.Option__Node__Some__1 := tail - assume variantOfOptionNode(tmp_option) == Option__Node__Some() - - res.Node__next := tmp_option - - assert acc(validNode(tail)) - fold acc(validOption(res.Node__next)) - fold acc(validNode(res)) -} - -method length_iter(list: Ref) returns (len: Int) - requires acc(validNode(list), write) - ensures old(length(list)) == len - // TODO we have to preserve this property - // ensures acc(validNode(list)) -{ - var curr: Ref := list - var tmp: Ref := list - - len := 1 - - unfold acc(validNode(curr)) - unfold acc(validOption(curr.Node__next)) - while(variantOfOptionNode(curr.Node__next) == Option__Node__Some()) - invariant acc(curr.Node__v) - invariant acc(curr.Node__next) - invariant (variantOfOptionNode(curr.Node__next) == Option__Node__Some() ==> ( - acc(curr.Node__next.Option__Node__Some__1, write) && - acc(validNode(curr.Node__next.Option__Node__Some__1)) - )) - invariant (variantOfOptionNode(curr.Node__next) == Option__Node__Some() ==> len + length(curr.Node__next.Option__Node__Some__1) == old(length(list))) - invariant (variantOfOptionNode(curr.Node__next) == Option__Node__None() ==> len == old(length(list))) - { - assert acc(validNode(curr.Node__next.Option__Node__Some__1)) - len := len + 1 - tmp := curr - curr := curr.Node__next.Option__Node__Some__1 - unfold acc(validNode(curr)) - unfold acc(validOption(curr.Node__next)) - } -} - -method t1() -{ - var l: Ref - - l := new(Node__v, Node__next) - l.Node__next := null - l.Node__v := 1 - assume 
variantOfOptionNode(l.Node__next) == Option__Node__None() - - fold validOption(l.Node__next) - fold validNode(l) - - assert length(l) == 1 - assert itemAt(l, 0) == 1 - - append(l, 7) - assert itemAt(l, 1) == 7 - assert itemAt(l, 0) == 1 - assert length(l) == 2 - - l := prepend(l, 10) - assert itemAt(l, 2) == 7 - assert itemAt(l, 1) == 1 - assert itemAt(l, 0) == 10 - assert length(l) == 3 - - //assert sum(l) == 18 -} - -method t2(l: Ref) returns (res: Ref) - requires acc(validNode(l), write) - ensures acc(validNode(res), write) - ensures length(res) > old(length(l)) -{ - res := prepend(l, 10) -} diff --git a/tests/examplefiles/test.ssp b/tests/examplefiles/test.ssp deleted file mode 100644 index 96d26d55..00000000 --- a/tests/examplefiles/test.ssp +++ /dev/null @@ -1,12 +0,0 @@ -<%@ val someName: String = "someDefaultValue" %> -<% import com.acme.MySnippets._ %> - - -

-    Hello ${someName}%
-    <ul>
-      <%= for (person <- people) { %>
-      <li>${person.name}</li>
-      <% } %>
-    </ul>
    - diff --git a/tests/examplefiles/test.swift b/tests/examplefiles/test.swift deleted file mode 100644 index 8ef19763..00000000 --- a/tests/examplefiles/test.swift +++ /dev/null @@ -1,65 +0,0 @@ -// -// test.swift -// from https://github.com/fullstackio/FlappySwift -// -// Created by Nate Murray on 6/2/14. -// Copyright (c) 2014 Fullstack.io. All rights reserved. -// - -import UIKit -import SpriteKit - -extension SKNode { - class func unarchiveFromFile(file : NSString) -> SKNode? { - - let path = NSBundle.mainBundle().pathForResource(file, ofType: "sks") - - var sceneData = NSData.dataWithContentsOfFile(path, options: .DataReadingMappedIfSafe, error: nil) - var archiver = NSKeyedUnarchiver(forReadingWithData: sceneData) - - archiver.setClass(self.classForKeyedUnarchiver(), forClassName: "SKScene") - let scene = archiver.decodeObjectForKey(NSKeyedArchiveRootObjectKey) as GameScene - archiver.finishDecoding() - return scene - } -} - -class GameViewController: UIViewController { - - override func viewDidLoad() { - super.viewDidLoad() - - if let scene = GameScene.unarchiveFromFile("GameScene") as? GameScene { - // Configure the view. - let skView = self.view as SKView - skView.showsFPS = true - skView.showsNodeCount = true - - /* Sprite Kit applies additional optimizations to improve rendering performance */ - skView.ignoresSiblingOrder = true - - /* Set the scale mode to scale to fit the window */ - scene.scaleMode = .AspectFill - - skView.presentScene(scene) - } - } - - override func shouldAutorotate() -> Bool { - return true - } - - override func supportedInterfaceOrientations() -> Int { - if UIDevice.currentDevice().userInterfaceIdiom == .Phone { - return Int(UIInterfaceOrientationMask.AllButUpsideDown.toRaw()) - } else { - return Int(UIInterfaceOrientationMask.All.toRaw()) - } - } - - override func didReceiveMemoryWarning() { - super.didReceiveMemoryWarning() - // Release any cached data, images, etc that aren't in use. - } - -} diff --git a/tests/examplefiles/test.tcsh b/tests/examplefiles/test.tcsh deleted file mode 100644 index e215ed04..00000000 --- a/tests/examplefiles/test.tcsh +++ /dev/null @@ -1,830 +0,0 @@ -# -# $Id: complete.tcsh,v 1.2 1998/05/11 10:40:54 luisgh Exp $ -# example file using the new completion code -# - -# Debian GNU/Linux -# file: /usr/share/doc/examples/tcsh/complete.tcsh -# -# This file may be read from user's .cshrc file by decompressing it into -# the home directory as ~/.complete and then adding the line -# "source ${HOME}/.complete" and maybe defining some of -# the shell variables described below. -# -# Debian enhancements by Vadim Vygonets : -# -# Added two Debian-specific completions: dpkg and dpkg-deb (who wrote -# them?). To turn it off, define no_debian_complete before loading -# this file. -# -# Added some new completions. To turn them off, define -# no_new_complete before loading this file. -# -# Changed completions of several commands. 
The ones are evaluated if -# the following shell variables are defined: -# -# traditional_cp_mv_complete -# for traditional completion of cp and mv commands -# traditional_zcat_complete -# for traditional completion of zcat command -# traditional_nm_complete -# for traditional completion of nm command -# traditilnal_tex_complete -# for traditional completion of tex command -# traditional_find_complete -# for traditional completion of find command -# traditional_configure_complete -# for traditional completion of ./configure command -# foolproof_rm_complete or traditional_rm_complete -# for traditional completion of rm command -# traditional_complete -# all of the above - -if ($?traditional_complete) then - set traditional_cp_mv_complete - set traditional_zcat_complete - set traditional_nm_complete - set traditilnal_tex_complete - set traditional_find_complete - set traditional_configure_complete - set foolproof_rm_complete -endif - -if ($?traditional_rm_complete) then - set foolproof_rm_complete -endif - -onintr - -if (! $?prompt) goto end - -if ($?tcsh) then - if ($tcsh != 1) then - set rev=$tcsh:r - set rel=$rev:e - set pat=$tcsh:e - set rev=$rev:r - endif - if ($rev > 5 && $rel > 1) then - set complete=1 - endif - unset rev rel pat -endif - -if ($?complete) then - set noglob - set hosts - foreach f ($HOME/.hosts /usr/local/etc/csh.hosts $HOME/.rhosts /etc/hosts.equiv) - if ( -r $f ) then - set hosts=($hosts `cut -d " " -f 1 $f | grep -v +`) - endif - end - if ( -r $HOME/.netrc ) then - set f=`awk '/machine/ { print $2 }' < $HOME/.netrc` >& /dev/null - set hosts=($hosts $f) - endif - unset f - if ( ! $?hosts ) then - set hosts=(hyperion.ee.cornell.edu phaeton.ee.cornell.edu \ - guillemin.ee.cornell.edu vangogh.cs.berkeley.edu \ - ftp.uu.net prep.ai.mit.edu export.lcs.mit.edu \ - labrea.stanford.edu sumex-aim.stanford.edu \ - tut.cis.ohio-state.edu) - endif - - complete ywho n/*/\$hosts/ # argument from list in $hosts - complete rsh p/1/\$hosts/ c/-/"(l n)"/ n/-l/u/ N/-l/c/ n/-/c/ p/2/c/ p/*/f/ - complete xrsh p/1/\$hosts/ c/-/"(l 8 e)"/ n/-l/u/ N/-l/c/ n/-/c/ p/2/c/ p/*/f/ - complete rlogin p/1/\$hosts/ c/-/"(l 8 e)"/ n/-l/u/ - complete telnet p/1/\$hosts/ p/2/x:''/ n/*/n/ - - complete cd p/1/d/ # Directories only - complete chdir p/1/d/ - complete pushd p/1/d/ - complete popd p/1/d/ - complete pu p/1/d/ - complete po p/1/d/ - complete complete p/1/X/ # Completions only - complete uncomplete n/*/X/ - complete exec p/1/c/ # Commands only - complete trace p/1/c/ - complete strace p/1/c/ - complete which n/*/c/ - complete where n/*/c/ - complete skill p/1/c/ - complete dde p/1/c/ - complete adb c/-I/d/ n/-/c/ N/-/"(core)"/ p/1/c/ p/2/"(core)"/ - complete sdb p/1/c/ - complete dbx c/-I/d/ n/-/c/ N/-/"(core)"/ p/1/c/ p/2/"(core)"/ - complete xdb p/1/c/ - complete gdb n/-d/d/ n/*/c/ - complete ups p/1/c/ - complete set 'c/*=/f/' 'p/1/s/=' 'n/=/f/' - complete unset n/*/s/ - complete alias p/1/a/ # only aliases are valid - complete unalias n/*/a/ - complete xdvi n/*/f:*.dvi/ # Only files that match *.dvi - complete dvips n/*/f:*.dvi/ -if ($?traditilnal_tex_complete) then - complete tex n/*/f:*.tex/ # Only files that match *.tex -else - complete tex n/*/f:*.{tex,texi}/ # Files that match *.tex and *.texi -endif - complete latex n/*/f:*.{tex,ltx}/ - complete su c/--/"(login fast preserve-environment command shell \ - help version)"/ c/-/"(f l m p c s -)"/ \ - n/{-c,--command}/c/ \ - n@{-s,--shell}@'`cat /etc/shells`'@ n/*/u/ - complete cc c/-[IL]/d/ \ - c@-l@'`\ls -1 /usr/lib/lib*.a | sed 
s%^.\*/lib%%\;s%\\.a\$%%`'@ \ - c/-/"(o l c g L I D U)"/ n/*/f:*.[coasi]/ - complete acc c/-[IL]/d/ \ - c@-l@'`\ls -1 /usr/lang/SC1.0/lib*.a | sed s%^.\*/lib%%\;s%\\.a\$%%`'@ \ - c/-/"(o l c g L I D U)"/ n/*/f:*.[coasi]/ - complete gcc c/-[IL]/d/ \ - c/-f/"(caller-saves cse-follow-jumps delayed-branch \ - elide-constructors expensive-optimizations \ - float-store force-addr force-mem inline \ - inline-functions keep-inline-functions \ - memoize-lookups no-default-inline \ - no-defer-pop no-function-cse omit-frame-pointer \ - rerun-cse-after-loop schedule-insns \ - schedule-insns2 strength-reduce \ - thread-jumps unroll-all-loops \ - unroll-loops syntax-only all-virtual \ - cond-mismatch dollars-in-identifiers \ - enum-int-equiv no-asm no-builtin \ - no-strict-prototype signed-bitfields \ - signed-char this-is-variable unsigned-bitfields \ - unsigned-char writable-strings call-saved-reg \ - call-used-reg fixed-reg no-common \ - no-gnu-binutils nonnull-objects \ - pcc-struct-return pic PIC shared-data \ - short-enums short-double volatile)"/ \ - c/-W/"(all aggregate-return cast-align cast-qual \ - comment conversion enum-clash error format \ - id-clash-len implicit missing-prototypes \ - no-parentheses pointer-arith return-type shadow \ - strict-prototypes switch uninitialized unused \ - write-strings)"/ \ - c/-m/"(68000 68020 68881 bitfield fpa nobitfield rtd \ - short c68000 c68020 soft-float g gnu unix fpu \ - no-epilogue)"/ \ - c/-d/"(D M N)"/ \ - c/-/"(f W vspec v vpath ansi traditional \ - traditional-cpp trigraphs pedantic x o l c g L \ - I D U O O2 C E H B b V M MD MM i dynamic \ - nodtdlib static nostdinc undef)"/ \ - c/-l/f:*.a/ \ - n/*/f:*.{c,C,cc,o,a,s,i}/ - complete g++ n/*/f:*.{C,cc,o,s,i}/ - complete CC n/*/f:*.{C,cc,o,s,i}/ -if ($?foolproof_rm_complete) then - complete rm c/--/"(directory force interactive verbose \ - recursive help version)"/ c/-/"(d f i v r R -)"/ \ - n/*/f:^*.{c,cc,C,h,in}/ # Protect precious files -else - complete rm c/--/"(directory force interactive verbose \ - recursive help version)"/ c/-/"(d f i v r R -)"/ -endif - complete vi n/*/f:^*.[oa]/ - complete bindkey N/-a/b/ N/-c/c/ n/-[ascr]/'x:'/ \ - n/-[svedlr]/n/ c/-[vedl]/n/ c/-/"(a s k c v e d l r)"/\ - n/-k/"(left right up down)"/ p/2-/b/ \ - p/1/'x:'/ - -if ($?traditional_find_complete) then - complete find n/-fstype/"(nfs 4.2)"/ n/-name/f/ \ - n/-type/"(c b d f p l s)"/ n/-user/u/ n/-group/g/ \ - n/-exec/c/ n/-ok/c/ n/-cpio/f/ n/-ncpio/f/ n/-newer/f/ \ - c/-/"(fstype name perm prune type user nouser \ - group nogroup size inum atime mtime ctime exec \ - ok print ls cpio ncpio newer xdev depth \ - daystart follow maxdepth mindepth noleaf version \ - anewer cnewer amin cmin mmin true false uid gid \ - ilname iname ipath iregex links lname empty path \ - regex used xtype fprint fprint0 fprintf \ - print0 printf not a and o or)"/ \ - n/*/d/ -else - complete find n/-fstype/"(ufs nfs tmp mfs minix ext2 msdos umsdos vfat proc iso9660 4.2 4.3 local)"/ \ - n/-name/f/ \ - n/-type/"(c b d f p l s)"/ n/-user/u/ n/-group/g/ \ - n/-exec/c/ n/-ok/c/ n/-cpio/f/ n/-ncpio/f/ n/-newer/f/ \ - c/-/"(fstype name perm prune type user nouser \ - group nogroup size inum atime mtime ctime exec \ - ok print ls cpio ncpio newer xdev depth \ - daystart follow maxdepth mindepth noleaf version \ - anewer cnewer amin cmin mmin true false uid gid \ - ilname iname ipath iregex links lname empty path \ - regex used xtype fprint fprint0 fprintf \ - print0 printf not a and o or)"/ \ - n/*/d/ -endif - complete -%* c/%/j/ # fill in the 
jobs builtin - complete {fg,bg,stop} c/%/j/ p/1/"(%)"// - - complete limit c/-/"(h)"/ n/*/l/ - complete unlimit c/-/"(h)"/ n/*/l/ - - complete -co* p/0/"(compress)"/ # make compress completion - # not ambiguous -if ($?traditional_zcat_complete) then - complete zcat n/*/f:*.Z/ -else - complete zcat c/--/"(force help license quiet version)"/ \ - c/-/"(f h L q V -)"/ n/*/f:*.{gz,Z,z,zip}/ -endif -if ($?traditional_nm_complete) then - complete nm n/*/f:^*.{h,C,c,cc}/ -else -complete nm 'c/--radix=/x:/' \ - 'c/--target=/x:/' \ - 'c/--format=/(bsd sysv posix)/n/' \ - 'c/--/(debugsyms extern-only demangle dynamic print-armap \ - print-file-name numeric-sort no-sort reverse-sort \ - size-sort undefined-only portability target= radix= \ - format= defined-only\ line-numbers no-demangle version \ - help)//' \ - 'n/*/f:^*.{h,c,cc,s,S}/' -endif - - complete finger c/*@/\$hosts/ n/*/u/@ - complete ping p/1/\$hosts/ - complete traceroute p/1/\$hosts/ - - complete {talk,ntalk,phone} p/1/'`users | tr " " "\012" | uniq`'/ \ - n/*/\`who\ \|\ grep\ \$:1\ \|\ awk\ \'\{\ print\ \$2\ \}\'\`/ - - complete ftp c/-/"(d i g n v)"/ n/-/\$hosts/ p/1/\$hosts/ n/*/n/ - - # this one is simple... - #complete rcp c/*:/f/ C@[./\$~]*@f@ n/*/\$hosts/: - # From Michael Schroeder - # This one will rsh to the file to fetch the list of files! - complete rcp 'c%*@*:%`set q=$:-0;set q="$q:s/@/ /";set q="$q:s/:/ /";set q=($q " ");rsh $q[2] -l $q[1] ls -dp $q[3]\*`%' 'c%*:%`set q=$:-0;set q="$q:s/:/ /";set q=($q " ");rsh $q[1] ls -dp $q[2]\*`%' 'c%*@%$hosts%:' 'C@[./$~]*@f@' 'n/*/$hosts/:' - - complete dd c/--/"(help version)"/ c/[io]f=/f/ \ - c/conv=*,/"(ascii ebcdic ibm block unblock \ - lcase ucase swab noerror sync)"/,\ - c/conv=/"(ascii ebcdic ibm block unblock \ - lcase ucase swab noerror sync)"/,\ - c/*=/x:''/ \ - n/*/"(if of conv ibs obs bs cbs files skip file seek count)"/= - - complete nslookup p/1/x:''/ p/2/\$hosts/ - - complete ar c/[dmpqrtx]/"(c l o u v a b i)"/ p/1/"(d m p q r t x)"// \ - p/2/f:*.a/ p/*/f:*.o/ - - complete {refile,sprev,snext,scan,pick,rmm,inc,folder,show} \ - c@+@F:$HOME/Mail/@ - - # these and interrupt handling from Jaap Vermeulen - complete {rexec,rxexec,rxterm,rmterm} \ - 'p/1/$hosts/' 'c/-/(l L E)/' 'n/-l/u/' 'n/-L/f/' \ - 'n/-E/e/' 'n/*/c/' - complete kill 'c/-/S/' 'c/%/j/' \ - 'n/*/`ps -u $LOGNAME | awk '"'"'{print $1}'"'"'`/' - - # these from Marc Horowitz - complete attach 'n/-mountpoint/d/' 'n/-m/d/' 'n/-type/(afs nfs rvd ufs)/' \ - 'n/-t/(afs nfs rvd ufs)/' 'n/-user/u/' 'n/-U/u/' \ - 'c/-/(verbose quiet force printpath lookup debug map \ - nomap remap zephyr nozephyr readonly write \ - mountpoint noexplicit explicit type mountoptions \ - nosetuid setuid override skipfsck lock user host)/' \ - 'n/-e/f/' 'n/*/()/' - complete hesinfo 'p/1/u/' \ - 'p/2/(passwd group uid grplist pcap pobox cluster \ - filsys sloc service)/' - - # these from E. 
Jay Berkenbilt -if ($?traditional_configure_complete) then - complete ./configure 'c/--*=/f/' 'c/--{cache-file,prefix,srcdir}/(=)//' \ - 'c/--/(cache-file verbose prefix srcdir)//' -else -complete ./configure \ - 'c@--{prefix,exec-prefix,bindir,sbindir,libexecdir,datadir,sysconfdir,sharedstatedir,localstatedir,infodir,mandir,srcdir,x-includes,x-libraries}=*@x:'@ \ - 'c/--cachefile=*/x:/' \ - 'c/--{enable,disable,with}-*/x://' \ - 'c/--*=/x:/' \ - 'c/--/(prefix= exec-prefix= bindir= sbindir= \ - libexecdir= datadir= sysconfdir= \ - sharedstatedir= localstatedir= infodir= \ - mandir= srcdir= x-includes= x-libraries= \ - enable- disable- with- )//' \ - 'c/--(help no-create quiet silent version \ - verbose)/' -endif - complete gs 'c/-sDEVICE=/(x11 cdjmono cdj550 epson eps9high epsonc \ - dfaxhigh dfaxlow laserjet ljet4 sparc pbm \ - pbmraw pgm pgmraw ppm ppmraw bit)/' \ - 'c/-sOutputFile=/f/' 'c/-s/(DEVICE OutputFile)/=' \ - 'c/-d/(NODISPLAY NOPLATFONTS NOPAUSE)/' 'n/*/f/' - complete perl 'n/-S/c/' - complete printenv 'n/*/e/' - complete sccs p/1/"(admin cdc check clean comb deledit delget \ - delta diffs edit enter fix get help info \ - print prs prt rmdel sccsdiff tell unedit \ - unget val what)"/ - complete setenv 'p/1/e/' 'c/*:/f/' - - # these and method of setting hosts from Kimmo Suominen - if ( -f $HOME/.mh_profile && -x "`which folders`" ) then - - if ( ! $?FOLDERS ) setenv FOLDERS "`folders -fast -recurse`" - if ( ! $?MHA ) setenv MHA "`ali | sed -e '/^ /d' -e 's/:.*//'`" - - set folders = ( $FOLDERS ) - set mha = ( $MHA ) - - complete ali \ - 'c/-/(alias nolist list nonormalize normalize nouser user help)/' \ - 'n,-alias,f,' - - complete anno \ - 'c/-/(component noinplace inplace nodate date text help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete burst \ - 'c/-/(noinplace inplace noquiet quiet noverbose verbose help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete comp \ - 'c/-/(draftfolder draftmessage nodraftfolder editor noedit file form nouse use whatnowproc nowhatnowproc help)/' \ - 'c,+,$folders,' \ - 'n,-whatnowproc,c,' \ - 'n,-file,f,'\ - 'n,-form,f,'\ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete dist \ - 'c/-/(noannotate annotate draftfolder draftmessage nodraftfolder editor noedit form noinplace inplace whatnowproc nowhatnowproc help)/' \ - 'c,+,$folders,' \ - 'n,-whatnowproc,c,' \ - 'n,-form,f,'\ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete folder \ - 'c/-/(all nofast fast noheader header nopack pack noverbose verbose norecurse recurse nototal total noprint print nolist list push pop help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete folders \ - 'c/-/(all nofast fast noheader header nopack pack noverbose verbose norecurse recurse nototal total noprint print nolist list push pop help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete forw \ - 'c/-/(noannotate annotate draftfolder draftmessage nodraftfolder editor noedit filter form noformat format noinplace inplace digest issue volume whatnowproc nowhatnowproc help)/' \ - 'c,+,$folders,' \ - 'n,-whatnowproc,c,' \ - 'n,-filter,f,'\ - 'n,-form,f,'\ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " 
"\12" | sort -u`,' - - complete inc \ - 'c/-/(audit file noaudit nochangecur changecur file form format nosilent silent notruncate truncate width help)/' \ - 'c,+,$folders,' \ - 'n,-audit,f,'\ - 'n,-form,f,' - - complete mark \ - 'c/-/(add delete list sequence nopublic public nozero zero help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete mhmail \ - 'c/-/(body cc from subject help)/' \ - 'n,-cc,$mha,' \ - 'n,-from,$mha,' \ - 'n/*/$mha/' - - complete mhpath \ - 'c/-/(help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete msgchk \ - 'c/-/(nodate date nonotify notify help)/' - - complete msh \ - 'c/-/(prompt noscan scan notopcur topcur help)/' - - complete next \ - 'c/-/(draft form moreproc nomoreproc length width showproc noshowproc header noheader help)/' \ - 'c,+,$folders,' \ - 'n,-moreproc,c,' \ - 'n,-showproc,c,' \ - 'n,-form,f,' - - complete packf \ - 'c/-/(file help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete pick \ - 'c/-/(and or not lbrace rbrace cc date from search subject to othercomponent after before datefield sequence nopublic public nozero zero nolist list help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete prev \ - 'c/-/(draft form moreproc nomoreproc length width showproc noshowproc header noheader help)/' \ - 'c,+,$folders,' \ - 'n,-moreproc,c,' \ - 'n,-showproc,c,' \ - 'n,-form,f,' - - complete prompter \ - 'c/-/(erase kill noprepend prepend norapid rapid nodoteof doteof help)/' - - complete refile \ - 'c/-/(draft nolink link nopreserve preserve src file help)/' \ - 'c,+,$folders,' \ - 'n,-file,f,'\ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete rmf \ - 'c/-/(nointeractive interactive help)/' \ - 'c,+,$folders,' - - complete rmm \ - 'c/-/(help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete scan \ - 'c/-/(noclear clear form format noheader header width noreverse reverse file help)/' \ - 'c,+,$folders,' \ - 'n,-form,f,'\ - 'n,-file,f,'\ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete send \ - 'c/-/(alias draft draftfolder draftmessage nodraftfolder filter nofilter noformat format noforward forward nomsgid msgid nopush push noverbose verbose nowatch watch width help)/' \ - 'n,-alias,f,'\ - 'n,-filter,f,' - - complete show \ - 'c/-/(draft form moreproc nomoreproc length width showproc noshowproc header noheader help)/' \ - 'c,+,$folders,' \ - 'n,-moreproc,c,' \ - 'n,-showproc,c,' \ - 'n,-form,f,'\ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete sortm \ - 'c/-/(datefield textfield notextfield limit nolimit noverbose verbose help)/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first last)|tr " " "\12" | sort -u`,' - - complete vmh \ - 'c/-/(prompt vmhproc novmhproc help)/' \ - 'n,-vmhproc,c,' - - complete whatnow \ - 'c/-/(draftfolder draftmessage nodraftfolder editor noedit prompt help)/' - - complete whom \ - 'c/-/(alias nocheck check draft draftfolder draftmessage nodraftfolder help)/' \ - 'n,-alias,f,' - - complete plum \ - 'c/-/()/' \ - 'c,+,$folders,' \ - 'n,*,`(mark | sed "s/:.*//";echo next cur prev first 
last)|tr " " "\12" | sort -u`,' - - complete mail \ - 'c/-/()/' \ - 'n/*/$mha/' - - endif - - # these from Tom Warzeka - # you may need to set the following variables for your host - set _elispdir = /usr/lib/emacs/19.34/lisp # GNU Emacs lisp directory - set _maildir = /var/spool/mail # Post Office: /var/spool/mail or /usr/mail - set _ypdir = /var/yp # directory where NIS (YP) maps are kept - set _domain = "`dnsdomainname`" - - # this one works but is slow and doesn't descend into subdirectories - # complete cd C@[./\$~]*@d@ \ - # p@1@'`\ls -1F . $cdpath | grep /\$ | sort -u`'@ n@*@n@ - - if ( -r /etc/shells ) then - complete setenv p@1@e@ n@DISPLAY@\$hosts@: n@SHELL@'`cat /etc/shells`'@ - else - complete setenv p@1@e@ n@DISPLAY@\$hosts@: - endif - complete unsetenv n/*/e/ - - if (-r $HOME/.mailrc) then - complete mail c/-/"(e i f n s u v)"/ c/*@/\$hosts/ \ - c@+@F:$HOME/Mail@ C@[./\$~]@f@ n/-s/x:''/ \ - n@-u@T:$_maildir@ n/-f/f/ \ - n@*@'`sed -n s/alias//p $HOME/.mailrc | tr -s " " " " | cut -f 2`'@ - else - complete mail c/-/"(e i f n s u v)"/ c/*@/\$hosts/ \ - c@+@F:$HOME/Mail@ C@[./\$~]@f@ n/-s/x:''/ \ - n@-u@T:$_maildir@ n/-f/f/ n/*/u/ - endif - - complete man n@1@'`\ls -1 /usr/man/man1 | sed s%\\.1.\*\$%%`'@ \ - n@2@'`\ls -1 /usr/man/man2 | sed s%\\.2.\*\$%%`'@ \ - n@3@'`\ls -1 /usr/man/man3 | sed s%\\.3.\*\$%%`'@ \ - n@4@'`\ls -1 /usr/man/man4 | sed s%\\.4.\*\$%%`'@ \ - n@5@'`\ls -1 /usr/man/man5 | sed s%\\.5.\*\$%%`'@ \ - n@6@'`\ls -1 /usr/man/man6 | sed s%\\.6.\*\$%%`'@ \ - n@7@'`\ls -1 /usr/man/man7 | sed s%\\.7.\*\$%%`'@ \ - n@8@'`\ls -1 /usr/man/man8 | sed s%\\.8.\*\$%%`'@ \ - n@9@'`[ -r /usr/man/man9 ] && \ls -1 /usr/man/man9 | sed s%\\.9.\*\$%%`'@ \ - n@0@'`[ -r /usr/man/man0 ] && \ls -1 /usr/man/man0 | sed s%\\.0.\*\$%%`'@ \ - n@new@'`[ -r /usr/man/mann ] && \ls -1 /usr/man/mann | sed s%\\.n.\*\$%%`'@ \ - n@old@'`[ -r /usr/man/mano ] && \ls -1 /usr/man/mano | sed s%\\.o.\*\$%%`'@ \ -n@local@'`[ -r /usr/man/manl ] && \ls -1 /usr/man/manl | sed s%\\.l.\*\$%%`'@ \ -n@public@'`[ -r /usr/man/manp ]&& \ls -1 /usr/man/manp | sed s%\\.p.\*\$%%`'@ \ - c/-/"(- f k P s t)"/ n/-f/c/ n/-k/x:''/ n/-P/d/ \ - N@-P@'`\ls -1 $:-1/man? | sed s%\\..\*\$%%`'@ n/*/c/ - - complete ps c/-t/x:''/ c/-/"(a c C e g k l S t u v w x)"/ \ - n/-k/x:''/ N/-k/x:''/ n/*/x:''/ - complete compress c/-/"(c f v b)"/ n/-b/x:''/ n/*/f:^*.Z/ - complete uncompress c/-/"(c f v)"/ n/*/f:*.Z/ - - complete xhost c/[+-]/\$hosts/ n/*/\$hosts/ - - # these conform to the latest GNU versions available at press time ... 
- - complete emacs c/-/"(batch d f funcall i insert kill l load \ - no-init-file nw q t u user)"/ c/+/x:''/ \ - n/-d/x:''/ n/-f/x:''/ n/-i/f/ \ - n@-l@F:$_elispdir@ n/-t/x:''/ \ - n/-u/u/ n/*/f:^*[\#~]/ - - complete gzcat c/--/"(force help license quiet version)"/ \ - c/-/"(f h L q V -)"/ n/*/f:*.{gz,Z,z,zip}/ - complete gzip c/--/"(stdout to-stdout decompress uncompress \ - force help list license no-name quiet recurse \ - suffix test verbose version fast best)"/ \ - c/-/"(c d f h l L n q r S t v V 1 2 3 4 5 6 7 8 9 -)"/\ - n/{-S,--suffix}/x:''/ \ - n/{-d,--{de,un}compress}/f:*.{gz,Z,z,zip,taz,tgz}/ \ - N/{-d,--{de,un}compress}/f:*.{gz,Z,z,zip,taz,tgz}/ \ - n/*/f:^*.{gz,Z,z,zip,taz,tgz}/ - complete {gunzip,ungzip} c/--/"(stdout to-stdout force help list license \ - no-name quiet recurse suffix test verbose version)"/ \ - c/-/"(c f h l L n q r S t v V -)"/ \ - n/{-S,--suffix}/x:''/ \ - n/*/f:*.{gz,Z,z,zip,taz,tgz}/ - complete zgrep c/-*A/x:'<#_lines_after>'/ c/-*B/x:'<#_lines_before>'/\ - c/-/"(A b B c C e f h i l n s v V w x)"/ \ - p/1/x:''/ \ - n/-*e/x:''/ n/-*f/f/ n/*/f/ - complete zegrep c/-*A/x:'<#_lines_after>'/ c/-*B/x:'<#_lines_before>'/\ - c/-/"(A b B c C e f h i l n s v V w x)"/ \ - p/1/x:''/ \ - n/-*e/x:''/ n/-*f/f/ n/*/f/ - complete zfgrep c/-*A/x:'<#_lines_after>'/ c/-*B/x:'<#_lines_before>'/\ - c/-/"(A b B c C e f h i l n s v V w x)"/ \ - p/1/x:''/ \ - n/-*e/x:''/ n/-*f/f/ n/*/f/ - - complete znew c/-/"(f t v 9 P K)"/ n/*/f:*.Z/ - complete zmore n/*/f:*.{gz,Z,z,zip}/ - complete zfile n/*/f:*.{gz,Z,z,zip,taz,tgz}/ - complete ztouch n/*/f:*.{gz,Z,z,zip,taz,tgz}/ - complete zforce n/*/f:^*.{gz,tgz}/ - - complete grep c/-*A/x:'<#_lines_after>'/ c/-*B/x:'<#_lines_before>'/\ - c/-/"(A b B c C e f h i l n s v V w x)"/ \ - p/1/x:''/ \ - n/-*e/x:''/ n/-*f/f/ n/*/f/ - complete egrep c/-*A/x:'<#_lines_after>'/ c/-*B/x:'<#_lines_before>'/\ - c/-/"(A b B c C e f h i l n s v V w x)"/ \ - p/1/x:''/ \ - n/-*e/x:''/ n/-*f/f/ n/*/f/ - complete fgrep c/-*A/x:'<#_lines_after>'/ c/-*B/x:'<#_lines_before>'/\ - c/-/"(A b B c C e f h i l n s v V w x)"/ \ - p/1/x:''/ \ - n/-*e/x:''/ n/-*f/f/ n/*/f/ - - complete users c/--/"(help version)"/ p/1/x:''/ - complete who c/--/"(heading mesg idle count help message version \ - writable)"/ c/-/"(H T w i u m q s -)"/ \ - p/1/x:''/ n/am/"(i)"/ n/are/"(you)"/ - - complete chown c/--/"(changes silent quiet verbose recursive help \ - version)"/ c/-/"(c f v R -)"/ C@[./\$~]@f@ c/*[.:]/g/ \ - n/-/u/. p/1/u/. 
n/*/f/ - complete chgrp c/--/"(changes silent quiet verbose recursive help \ - version)"/ c/-/"(c f v R -)"/ n/-/g/ p/1/g/ n/*/f/ - - complete cat c/--/"(number-nonblank number squeeze-blank show-all \ - show-nonprinting show-ends show-tabs help version)"/ \ - c/-/"(b e n s t u v A E T -)"/ n/*/f/ -if ($?traditional_cp_mv_complete) then - complete mv c/--/"(backup force interactive update verbose suffix \ - version-control help version)"/ \ - c/-/"(b f i u v S V -)"/ \ - n/{-S,--suffix}/x:''/ \ - n/{-V,--version-control}/"(t numbered nil existing \ - never simple)"/ n/-/f/ N/-/d/ p/1/f/ p/2/d/ n/*/f/ - complete cp c/--/"(archive backup no-dereference force interactive \ - link preserve symbolic-link update verbose parents \ - one-file-system recursive suffix version-control help \ - version)"/ c/-/"(a b d f i l p r s u v x P R S V -)"/ \ - n/-*r/d/ n/{-S,--suffix}/x:''/ \ - n/{-V,--version-control}/"(t numbered nil existing \ - never simple)"/ n/-/f/ N/-/d/ p/1/f/ p/2/d/ n/*/f/ -else - complete mv c/--/"(backup force interactive update verbose suffix \ - version-control help version)"/ \ - c/-/"(b f i u v S V -)"/ \ - n/{-S,--suffix}/x:''/ \ - n/{-V,--version-control}/"(t numbered nil existing \ - never simple)"/ n/-/f/ N/-/d/ n/*/f/ - complete cp c/--/"(archive backup no-dereference force interactive \ - link preserve symbolic-link update verbose parents \ - one-file-system recursive suffix version-control help \ - version)"/ c/-/"(a b d f i l p r s u v x P R S V -)"/ \ - n/-*r/d/ n/{-S,--suffix}/x:''/ \ - n/{-V,--version-control}/"(t numbered nil existing \ - never simple)"/ n/-/f/ N/-/d/ n/*/f/ -endif - complete ln c/--/"(backup directory force interactive symbolic \ - verbose suffix version-control help version)"/ \ - c/-/"(b d F f i s v S V -)"/ \ - n/{-S,--suffix}/x:''/ \ - n/{-V,--version-control}/"(t numbered nil existing \ - never simple)"/ n/-/f/ N/-/x:''/ \ - p/1/f/ p/2/x:''/ - complete touch c/--/"(date file help time version)"/ \ - c/-/"(a c d f m r t -)"/ \ - n/{-d,--date}/x:''/ \ - c/--time/"(access atime mtime modify use)"/ \ - n/{-r,--file}/f/ n/-t/x:''/ n/*/f/ - complete mkdir c/--/"(parents help version mode)"/ c/-/"(p m -)"/ \ - n/{-m,--mode}/x:''/ n/*/d/ - complete rmdir c/--/"(parents help version)"/ c/-/"(p -)"/ n/*/d/ - - complete tar c/-[Acru]*/"(b B C f F g G h i l L M N o P \ - R S T v V w W X z Z)"/ \ - c/-[dtx]*/"( B C f F g G i k K m M O p P \ - R s S T v w x X z Z)"/ \ - p/1/"(A c d r t u x -A -c -d -r -t -u -x \ - --catenate --concatenate --create --diff --compare \ - --delete --append --list --update --extract --get)"/ \ - c/--/"(catenate concatenate create diff compare \ - delete append list update extract get atime-preserve \ - block-size read-full-blocks directory checkpoint file \ - force-local info-script new-volume-script incremental \ - listed-incremental dereference ignore-zeros \ - ignore-failed-read keep-old-files starting-file \ - one-file-system tape-length modification-time \ - multi-volume after-date newer old-archive portability \ - to-stdout same-permissions preserve-permissions \ - absolute-paths preserve record-number remove-files \ - same-order preserve-order same-owner sparse \ - files-from null totals verbose label version \ - interactive confirmation verify exclude exclude-from \ - compress uncompress gzip ungzip use-compress-program \ - block-compress)"/ \ - c/-/"(b B C f F g G h i k K l L m M N o O p P R s S \ - T v V w W X z Z 0 1 2 3 4 5 6 7 -)"/ \ - n/-c*f/x:''/ \ - n/{-[Adrtux]*f,--file}/f:*.tar/ \ - N/{-x*f,--file}/'`tar -tf 
$:-1`'/ \ - n/--use-compress-program/c/ \ - n/{-b,--block-size}/x:''/ \ - n/{-V,--label}/x:''/ \ - n/{-N,--{after-date,newer}}/x:''/ \ - n/{-L,--tape-length}/x:''/ \ - n/{-C,--directory}/d/ \ - N/{-C,--directory}/'`\ls $:-1`'/ \ - n/-[0-7]/"(l m h)"/ - - # BSD 4.3 filesystems - complete mount c/-/"(a h v t r)"/ n/-h/\$hosts/ n/-t/"(4.2 nfs)"/ \ - n@*@'`cut -d " " -f 2 /etc/fstab`'@ - complete umount c/-/"(a h v t)"/ n/-h/\$hosts/ n/-t/"(4.2 nfs)"/ \ - n/*/'`mount | cut -d " " -f 3`'/ - # BSD 4.2 filesystems - #complete mount c/-/"(a h v t r)"/ n/-h/\$hosts/ n/-t/"(ufs nfs)"/ \ - # n@*@'`cut -d ":" -f 2 /etc/fstab`'@ - #complete umount c/-/"(a h v t)"/ n/-h/\$hosts/ n/-t/"(ufs nfs)"/ \ - # n/*/'`mount | cut -d " " -f 3`'/ - - # these deal with NIS (formerly YP); if it's not running you don't need 'em - complete domainname p@1@D:$_ypdir@" " n@*@n@ - complete ypcat c@-@"(d k t x)"@ n@-x@n@ n@-d@D:$_ypdir@" " \ - N@-d@\`\\ls\ -1\ $_ypdir/\$:-1\ \|\ sed\ -n\ s%\\\\.pag\\\$%%p\`@ \ - n@*@\`\\ls\ -1\ $_ypdir/$_domain\ \|\ sed\ -n\ s%\\\\.pag\\\$%%p\`@ - complete ypmatch c@-@"(d k t x)"@ n@-x@n@ n@-d@D:$_ypdir@" " \ - N@-d@x:''@ n@-@x:''@ p@1@x:''@ \ - n@*@\`\\ls\ -1\ $_ypdir/$_domain\ \|\ sed\ -n\ s%\\\\.pag\\\$%%p\`@ - complete ypwhich c@-@"(d m t x V1 V2)"@ n@-x@n@ n@-d@D:$_ypdir@" " \ - n@-m@\`\\ls\ -1\ $_ypdir/$_domain\ \|\ sed\ -n\ s%\\\\.pag\\\$%%p\`@ \ - N@-m@n@ n@*@\$hosts@ - - # there's no need to clutter the user's shell with these - unset _elispdir _maildir _ypdir _domain - - complete make \ - 'n/-f/f/' \ - 'c/*=/f/' \ - 'n@*@`cat -s GNUmakefile Makefile makefile |& sed -n -e "/No such file/d" -e "/^[^ #].*:/s/:.*//p"`@' - - if ( -f /etc/printcap ) then - set printers=(`sed -n -e "/^[^ #].*:/s/:.*//p" /etc/printcap`) - - complete lpr 'c/-P/$printers/' - complete lpq 'c/-P/$printers/' - complete lprm 'c/-P/$printers/' - complete lpquota 'p/1/(-Qprlogger)/' 'c/-P/$printers/' - complete dvips 'c/-P/$printers/' 'n/-o/f:*.{ps,PS}/' 'n/*/f:*.dvi/' - endif - -# New -if (! $?no_new_complete) then - uncomplete vi - complete {vi,vim,gvim,nvi,elvis} n/*/f:^*.{o,a,so,sa,aux,dvi,log,fig,bbl,blg,bst,idx,ilg,ind,toc}/ - complete {ispell,spell,spellword} 'n@-d@`ls /usr/lib/ispell/*.aff | sed -e "s/\.aff//" `@' 'n/*/f:^*.{o,a,so,sa,aux,dvi,log,fig,bbl,blg,bst,idx,ilg,ind,toc}/' - complete mutt 'n/-[ai]/f/' 'n/-c/u/' c@=@F:$HOME/Mail/@ \ - 'n/-s/x:\/' 'n/[^-]/u/' - complete elm 'n/-[Ai]/f/' 'c@=@F:$HOME/Mail/@' 'n/-s/x:\/' - complete ncftp 'n@*@`sed -e '1,2d' $HOME/.ncftp/bookmarks | cut -f 1,2 -d "," | tr "," "\012" | sort | uniq ` '@ - complete bibtex 'n@*@`ls *.aux | sed -e "s/\.aux//"`'@ - complete dvi2tty n/*/f:*.dvi/ # Only files that match *.dvi - complete {xpdf,acroread} 'n/*/f:*.pdf/' - complete {gv,ghostview} 'n/*/f:*.{ps,eps,epsi}/' - complete enscript \ - 'c/--/(columns= pages= header= no-header truncate-lines \ - line-numbers setpagedevice= escapes font= \ - header-font= fancy-header no-job-header \ - highlight-bars indent= filter= borders page-prefeed \ - no-page-prefeed lineprinter lines-per-page= mail \ - media= copies= newline= output= missing-characters \ - printer= quiet silent landscape portrait \ - baselineskip= statusdict= title= tabsize= underlay= \ - verbose version encoding pass-through download-font= \ - filter-stdin= help highlight-bar-gray= list-media \ - list-options non-printable-format= page-label-format= \ - printer-options= ul-angle= ul-font= ul-gray= \ - ul-position= ul-style= \ - )/' -endif # ! $?no_new_complete - -# Debian specific -if (! 
$?no_debian_complete) then -complete dpkg 'c/--{admindir,instdir,root}=/d/' \ - 'c/--debug=/n/' \ - 'c/--{admindir,debug,instdir,root}/(=)//' \ - 'c/--/(admindir= debug= instdir= root= \ - assert-support-predepends assert-working-epoch \ - audit auto-deconfigure clear-avail \ - compare-versions configure contents control \ - extract force-bad-path field \ - force-configure-any force-conflicts \ - force-depends force-depends-version force-help \ - force-hold force-non-root \ - force-overwrite-diverted \ - force-remove-essential force-remove-reinstreq \ - forget-old-unavail fsys-tarfile get-selections \ - help ignore-depends info install largemem \ - license list listfiles merge-avail no-act \ - pending predep-package print-architecture \ - print-gnu-build-architecture \ - print-installation-architecture print-avail \ - purge record-avail recursive refuse-downgrade \ - remove search set-selections selected-only \ - skip-same-version smallmem status unpack \ - update-avail version vextract \ - )//' \ - 'n/*/f:*.deb'/ -complete dpkg-deb 'c/--{build}=/d/' \ - 'c/--/"( build contents info field control extract \ - vextract fsys-tarfile help version \ - license )"' \ - 'n/*/f:*.deb/' -endif # ! $?no_debian_complete - - unset noglob - unset complete - unset traditional_complete - unset traditional_cp_mv_complete - unset traditional_zcat_complete - unset traditional_nm_complete - unset traditilnal_tex_complete - unset traditional_find_complete - unset traditional_configure_complete - unset traditional_rm_complete - unset foolproof_rm_complete - unset no_new_complete - unset no_debian_complete -endif - -end: - onintr - diff --git a/tests/examplefiles/test.vb b/tests/examplefiles/test.vb deleted file mode 100644 index e7252e90..00000000 --- a/tests/examplefiles/test.vb +++ /dev/null @@ -1,407 +0,0 @@ -' Copyright (c) 2008 Silken Web - Free BSD License -' All rights reserved. -' -' Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: -' * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer -' * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. -' * Neither the name of Silken Web nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. -' -' THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, -' THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS -' BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE -' GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -' LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -' DAMAGE. 
- -Imports System.Net.Mail -Imports SilkenWeb.Entities -Imports System.Text.RegularExpressions -Imports System.Reflection -Imports SilkenWeb.Validation -Imports System.Globalization -Imports SilkenWeb.Reflection - -Namespace SilkenWeb - - ''' - ''' Represents an Email and what you can do with it. - ''' - ''' - ''' Keith Jackson - ''' 11/04/2008 - ''' - ''' This class is intended to be inherrited for providing all manner of system generated emails, each represented by it's own class. - ''' - Public MustInherit Class EmailBase : Implements IValidatable, IDisposable - -#Region " Constants " - - Public Const LenientRegexPattern As String = "\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*" - Public Const StrictRegexPattern As String = "^(([^<>()[\]\\.,;:\s@\""]+(\.[^<>()[\]\\.,;:\s@\""]+)*)|(\"".+\""))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$" - Public Const InvalidEmailAddressError As String = "The Email address provided was invalid" - Public Const InvalidEmailAddressErrorWithAddress As String = "The Email address, {0}, provided was invalid" - Public Const NullEmailAddressError As String = "The Email address was not provided" - -#End Region - -#Region " Fields " - - Private disposedValue As Boolean - - Private _message As MailMessage = New MailMessage() - Private _mailClient As SmtpClient - - Private _useStrictValidation As Boolean - -#End Region - -#Region " Construction " - - ''' - ''' Instantiates a new Email of the derived type. - ''' - ''' The email address of the sender of the message. - ''' The email addresses of the recipients of the message. - ''' The subject of the message. - ''' The body of the message. - Protected Sub New(ByVal sender As String, ByVal subject As String, ByVal body As String, ByVal ParamArray recipients As String()) - _message.From = New MailAddress(sender) - For i As Integer = 0 To recipients.Length - 1 - _message.To.Add(recipients(i)) - Next - _message.Subject = subject - _message.Body = body - End Sub - -#End Region - -#Region " Properties " - - ''' - ''' Gets the Attachments for the message. - ''' - Protected Overridable ReadOnly Property Attachments() As AttachmentCollection - Get - Return _message.Attachments - End Get - End Property - - ''' - ''' The email addresses of the BCC recipients of the message. - ''' - Public Property BccRecipients() As String() - Get - Return _message.Bcc.ToAddressStringArray() - End Get - Set(ByVal value As String()) - _message.Bcc.Clear() - _message.Bcc.Add(value.ToDelimitedString()) - End Set - End Property - - ''' - ''' The body of the message. - ''' - Protected Overridable Property Body() As String - Get - Return _message.Body - End Get - Set(ByVal value As String) - _message.Body = value - End Set - End Property - - ''' - ''' The email addresses of the CC recipients of the message. - ''' - Public Property CCRecipients() As String() - Get - Return _message.CC.ToAddressStringArray() - End Get - Set(ByVal value As String()) - _message.CC.Clear() - _message.CC.Add(value.ToDelimitedString()) - End Set - End Property - - ''' - ''' Gets or Sets a flag to indicate if the body of the message is HTML. - ''' - Public Property IsBodyHtml() As Boolean - Get - Return _message.IsBodyHtml - End Get - Set(ByVal value As Boolean) - _message.IsBodyHtml = value - End Set - End Property - - ''' - ''' Gets the Mail message wrapped by the EmailBase class. 
- ''' - Protected ReadOnly Property Message() As MailMessage - Get - Return _message - End Get - End Property - - ''' - ''' Gets or Sets the Priority of the message. - ''' - Public Property Priority() As MailPriority - Get - Return _message.Priority - End Get - Set(ByVal value As MailPriority) - _message.Priority = value - End Set - End Property - - ''' - ''' The email addresses of the recipients of the message. - ''' - Public Property Recipients() As String() - Get - Return _message.To.ToAddressStringArray() - End Get - Set(ByVal value As String()) - _message.To.Clear() - _message.To.Add(value.ToDelimitedString()) - End Set - End Property - - ''' - ''' The reply email address of the sender of the message. - ''' - Public Property ReplyTo() As String - Get - If _message.ReplyTo Is Nothing Then - Return String.Empty - Else - Return _message.ReplyTo.Address - End If - End Get - Set(ByVal value As String) - If _message.ReplyTo Is Nothing Then - _message.ReplyTo = New MailAddress(value) - Else - _message.ReplyTo = New MailAddress(value, _message.ReplyTo.DisplayName) - End If - End Set - End Property - - ''' - ''' The reply display name of the sender of the message. - ''' - Public Property ReplyToDisplayName() As String - Get - If _message.ReplyTo Is Nothing Then - Return String.Empty - Else - Return _message.ReplyTo.DisplayName - End If - End Get - Set(ByVal value As String) - If _message.ReplyTo Is Nothing Then - _message.ReplyTo = New MailAddress(_message.From.Address, value) - Else - _message.ReplyTo = New MailAddress(_message.ReplyTo.Address, value) - End If - End Set - End Property - - ''' - ''' The email address of the sender of the message. - ''' - Public Overridable Property Sender() As String - Get - Return _message.From.Address - End Get - Protected Set(ByVal value As String) - _message.From = New MailAddress(value, _message.From.DisplayName) - End Set - End Property - - ''' - ''' The display name of the sender of the message. - ''' - Public Overridable Property SenderDisplayName() As String - Get - Return _message.From.DisplayName - End Get - Protected Set(ByVal value As String) - _message.From = New MailAddress(_message.From.Address, value) - End Set - End Property - - ''' - ''' The subject of the message. - ''' - Public Overridable Property Subject() As String - Get - Return _message.Subject - End Get - Protected Set(ByVal value As String) - _message.Subject = value - End Set - End Property - -#End Region - -#Region " Methods " - -#Region " Send Methods " - - ''' - ''' Sends this email - ''' - ''' The SMTP server to use to send the email. - Public Sub Send(ByVal mailServer As String) - _mailClient = New SmtpClient(mailServer) - _mailClient.Send(_message) - End Sub - - ''' - ''' Sends this email asynchronously. - ''' - ''' The SMTP server to use to send the email. - ''' A user defined token passed to the recieving method on completion of the asynchronous task. - Public Sub SendAsync(ByVal mailServer As String, ByVal userToken As Object) - _mailClient = New SmtpClient(mailServer) - _mailClient.SendAsync(_message, userToken) - End Sub - - ''' - ''' Cancels an attempt to send this email asynchronously. - ''' - Public Sub SendAsyncCancel() - _mailClient.SendAsyncCancel() - End Sub - -#End Region - -#End Region - -#Region " IValidatable Implementation " - - ''' - ''' gets and Sets a flag to indicate whether to use strict validation. 
- ''' - Public Property UseStrictValidation() As Boolean - Get - Return _useStrictValidation - End Get - Set(ByVal value As Boolean) - _useStrictValidation = value - End Set - End Property - - ''' - ''' Validates this email. - ''' - ''' A ValidationResponse, containing a flag to indicate if validation was passed and a collection of Property Names and validation errors. - Public Function Validate() As ValidationResponse Implements IValidatable.Validate - - Dim retVal As New ValidationResponse() - Dim mailRegEx As String = If(_useStrictValidation, StrictRegexPattern, LenientRegexPattern) - - ValidateAddress("Sender", retVal, mailRegEx, True) - ValidateAddresses("Recipients", retVal, mailRegEx, True) - ValidateAddresses("CcRecipients", retVal, mailRegEx) - ValidateAddresses("BccRecipients", retVal, mailRegEx) - ValidateAddress("ReplyTo", retVal, mailRegEx) - - Return retVal - - End Function - - ''' - ''' Validates a single Email Address property. - ''' - ''' The name of the property to validate. - ''' The validation response object. - ''' The regular expression pattern to use for validation. - Private Overloads Sub ValidateAddress(ByVal propertyName As String, ByRef retVal As ValidationResponse, ByVal mailRegEx As String) - ValidateAddress(propertyName, retVal, mailRegEx, False) - End Sub - - ''' - ''' Validates a single Email Address property. - ''' - ''' The name of the property to validate. - ''' The validation response object. - ''' The regular expression pattern to use for validation. - ''' Indicates if the address is required; False if not specified. - Private Overloads Sub ValidateAddress(ByVal propertyName As String, ByRef retVal As ValidationResponse, ByVal mailRegEx As String, ByVal required As Boolean) - - Dim emailAddress As String = ReflectionHelper.Properties.GetProperty(Of String)(Me, propertyName) - - If emailAddress Is Nothing OrElse emailAddress.Length = 0 Then - If required Then retVal.Add(New KeyValuePair(Of String, String)(propertyName, NullEmailAddressError)) - Else - If (Not Regex.IsMatch(emailAddress, mailRegEx)) Then - retVal.Add(New KeyValuePair(Of String, String)(propertyName, InvalidEmailAddressError)) - End If - End If - - End Sub - - ''' - ''' Validates a string array of Email Address property. - ''' - ''' The name of the property to validate. - ''' The validation response object. - ''' The regular expression pattern to use for validation. - Private Overloads Sub ValidateAddresses(ByVal propertyName As String, ByRef retVal As ValidationResponse, ByVal mailRegEx As String) - ValidateAddresses(propertyName, retVal, mailRegEx, False) - End Sub - - ''' - ''' Validates a string array of Email Address property. - ''' - ''' The name of the property to validate. - ''' The validation response object. - ''' The regular expression pattern to use for validation. - ''' Indicates if the address is required; False if not specified. 
- Private Overloads Sub ValidateAddresses(ByVal propertyName As String, ByRef retVal As ValidationResponse, ByVal mailRegEx As String, ByVal required As Boolean) - - Dim emailAddresses() As String = ReflectionHelper.Properties.GetProperty(Of String())(Me, propertyName) - - If emailAddresses Is Nothing OrElse emailAddresses.Length = 0 Then - If required Then retVal.Add(New KeyValuePair(Of String, String)(propertyName, String.Format(CultureInfo.CurrentCulture, NullEmailAddressError))) - Else - For i As Integer = 0 To emailAddresses.Length - 1 - If (Not Regex.IsMatch(emailAddresses(i), mailRegEx)) Then - retVal.Add(New KeyValuePair(Of String, String)(propertyName, String.Format(CultureInfo.CurrentCulture, InvalidEmailAddressErrorWithAddress, emailAddresses(i)))) - End If - Next - End If - - End Sub - -#End Region - -#Region " IDisposable Implementation " - - Protected Overridable Sub Dispose(ByVal disposing As Boolean) - If Not Me.disposedValue Then - If disposing Then - _message.Dispose() - End If - _mailClient = Nothing - _message = Nothing - End If - Me.disposedValue = True - End Sub - - Public Sub Dispose() Implements IDisposable.Dispose - ' Do not change this code. Put cleanup code in Dispose(ByVal disposing As Boolean) above. - Dispose(True) - GC.SuppressFinalize(Me) - End Sub - -#End Region - - End Class - -End Namespace diff --git a/tests/examplefiles/test.vhdl b/tests/examplefiles/test.vhdl deleted file mode 100644 index 426f2375..00000000 --- a/tests/examplefiles/test.vhdl +++ /dev/null @@ -1,161 +0,0 @@ -library ieee; -use ieee.std_logic_unsigned.all; -use ieee.std_logic_1164.all; -use ieee.numeric_std.all; - - -entity top_testbench is --test - generic ( -- test - n : integer := 8 -- test - ); -- test -end top_testbench; -- test - - -architecture top_testbench_arch of top_testbench is - - component top is - generic ( - n : integer - ) ; - port ( - clk : in std_logic; - rst : in std_logic; - d1 : in std_logic_vector (n-1 downto 0); - d2 : in std_logic_vector (n-1 downto 0); - operation : in std_logic; - result : out std_logic_vector (2*n-1 downto 0) - ); - end component; - - signal clk : std_logic; - signal rst : std_logic; - signal operation : std_logic; - signal d1 : std_logic_vector (n-1 downto 0); - signal d2 : std_logic_vector (n-1 downto 0); - signal result : std_logic_vector (2*n-1 downto 0); - - type test_type is ( a1, a2, a3, a4, a5, a6, a7, a8, a9, a10); - attribute enum_encoding of my_state : type is "001 010 011 100 111"; -begin - - TESTUNIT : top generic map (n => n) - port map (clk => clk, - rst => rst, - d1 => d1, - d2 => d2, - operation => operation, - result => result); - - clock_process : process - begin - clk <= '0'; - wait for 5 ns; - clk <= '1'; - wait for 5 ns; - end process; - - data_process : process - begin - - -- test case #1 - operation <= '0'; - - rst <= '1'; - wait for 5 ns; - rst <= '0'; - wait for 5 ns; - - d1 <= std_logic_vector(to_unsigned(60, d1'length)); - d2 <= std_logic_vector(to_unsigned(12, d2'length)); - wait for 360 ns; - - assert (result = std_logic_vector(to_unsigned(720, result'length))) - report "Test case #1 failed" severity error; - - -- test case #2 - operation <= '0'; - - rst <= '1'; - wait for 5 ns; - rst <= '0'; - wait for 5 ns; - - d1 <= std_logic_vector(to_unsigned(55, d1'length)); - d2 <= std_logic_vector(to_unsigned(1, d2'length)); - wait for 360 ns; - - assert (result = std_logic_vector(to_unsigned(55, result'length))) - report "Test case #2 failed" severity error; - - -- etc - - end process; - -end top_testbench_arch; - - 
-configuration testbench_for_top of top_testbench is - for top_testbench_arch - for TESTUNIT : top - use entity work.top(top_arch); - end for; - end for; -end testbench_for_top; - - -function compare(A: std_logic, B: std_Logic) return std_logic is - constant pi : real := 3.14159; - constant half_pi : real := pi / 2.0; - constant cycle_time : time := 2 ns; - constant N, N5 : integer := 5; -begin - if (A = '0' and B = '1') then - return B; - else - return A; - end if ; -end compare; - - -procedure print(P : std_logic_vector(7 downto 0); - U : std_logic_vector(3 downto 0)) is - variable my_line : line; - alias swrite is write [line, string, side, width] ; -begin - swrite(my_line, "sqrt( "); - write(my_line, P); - swrite(my_line, " )= "); - write(my_line, U); - writeline(output, my_line); -end print; - - -entity add32csa is -- one stage of carry save adder for multiplier - port( - b : in std_logic; -- a multiplier bit - a : in std_logic_vector(31 downto 0); -- multiplicand - sum_in : in std_logic_vector(31 downto 0); -- sums from previous stage - cin : in std_logic_vector(31 downto 0); -- carrys from previous stage - sum_out : out std_logic_vector(31 downto 0); -- sums to next stage - cout : out std_logic_vector(31 downto 0)); -- carrys to next stage -end add32csa; - - -ARCHITECTURE circuits of add32csa IS - SIGNAL zero : STD_LOGIC_VECTOR(31 downto 0) := X"00000000"; - SIGNAL aa : std_logic_vector(31 downto 0) := X"00000000"; - - COMPONENT fadd -- duplicates entity port - PoRT(a : in std_logic; - b : in std_logic; - cin : in std_logic; - s : out std_logic; - cout : out std_logic); - end comPonent fadd; - -begin -- circuits of add32csa - aa <= a when b='1' else zero after 1 ns; - stage: for I in 0 to 31 generate - sta: fadd port map(aa(I), sum_in(I), cin(I) , sum_out(I), cout(I)); - end generate stage; -end architecture circuits; -- of add32csa diff --git a/tests/examplefiles/test.xqy b/tests/examplefiles/test.xqy deleted file mode 100644 index c626ea96..00000000 --- a/tests/examplefiles/test.xqy +++ /dev/null @@ -1,138 +0,0 @@ -(: made up functions, etc just to test xquery parsing (: even embedded comments -on multiple :) -lines -:) -xquery version "1.0"; - -module namespace xqueryexample "http://example.com/namespace"; -import module namespace importedns = "http://example.com/ns/imported" at "no/such/file.xqy"; - -declare namespace sess = "com.example.session"; - -declare variable $amazing := "awesome"; -declare variable $SESSIONS as element(sess:session)* := c:sessions(); - -declare option sess:clear "false"; - -define function whatsit($param as xs:string) as xs:string { - let $var1 := 1 - let $var2 := 2 - return (1 + 2 div ($var1 + $var2)) - - let $let := "test" - return (: some whitespace :) element element { - attribute attribute { 1 }, - element test { 'a' }, - attribute foo { "bar" }, - fn:doc()[ foo/@bar eq $let ], - //x/with/another/*/*:version/xpath/@attr } -}; - -let $bride := "Bride" -let $test := validate lax { html } -let $test := validate strict { html } -let $test := validate { html } -let $test := $var1/*:Article (: comment here :) [fn:not()] -let $test := $var1/@*:name/fn:string() - -let $noop := ordered { $test } -let $noop := unordered { $test } - -let $noop := - for $version at $i in $versions/version - let $row := if($i mod 2 eq 0) then "even" else "odd" - order by $version descending - return - -return - -{ - - - The Princess { fn:capitalize($bride) } - -
    - - { - (: placeholder for local sessions :) - element div { - attribute id { "sessions-local" }, - attribute class { "hidden" }, - element h1 { "Local Sessions" }, - element p { - 'These sessions use storage provided by your browser.', - 'You can also ', - element a { - attribute href { 'session-import-local.xqy' }, - 'import' }, - ' sessions from local XML files.' - } - } - } - { - for $i in $sessions - let $id := c:session-id($i) - let $uri := c:session-uri($i) - (: we only care about the lock that expires last :) - let $conflicting := c:conflicting-locks($uri, 1) - let $name as xs:string := ($i/sess:name, "(unnamed)")[1] - return element tr { - element td { $name }, - element td { string($i/sec:user) }, - element td { data($i/sess:created) }, - element td { data($i/sess:last-modified) }, - element td { - if (empty($conflicting)) then () else - text { - "by", $conflicting/lock:owner, - "until", adjust-dateTime-to-timezone( - x:epoch-seconds-to-dateTime( - $conflicting/lock:timestamp + $conflicting/lock:timeout - ) - ) - }, - (: only show resume button if there are no conflicting locks :) - element input { - attribute type { "button" }, - attribute title { - data($i/sess:query-buffers/sess:query[1]) }, - attribute onclick { - concat("list.resumeSession('", $id, "')") }, - attribute value { - "Resume", (' ', $id)[ $d:DEBUG ] } - }[ not($conflicting) ], - $x:NBSP, - (: clone button :) - element input { - attribute type { "button" }, - attribute title { "clone this session" }, - attribute onclick { - concat("list.cloneSession('", $id, "', this)") }, - attribute value { "Clone", (' ', $id)[ $d:DEBUG ] } - }, - $x:NBSP, - (: export button :) - element input { - attribute type { "button" }, - attribute title { "export this session" }, - attribute onclick { - concat("list.exportServerSession('", $id, "', this)") }, - attribute value { "Export", (' ', $id)[ $d:DEBUG ] } - }, - $x:NBSP, - (: only show delete button if there are no conflicting locks :) - element input { - attribute type { "button" }, - attribute title { "permanently delete this session" }, - attribute onclick { - concat("list.deleteSession('", $id, "', this)") }, - attribute value { "Delete", (' ', $id)[ $d:DEBUG ] } - }[ not($conflicting) ] - } - } - } -
    -
    -} -   - diff --git a/tests/examplefiles/test.xsl b/tests/examplefiles/test.xsl deleted file mode 100644 index 590bb043..00000000 --- a/tests/examplefiles/test.xsl +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/examplefiles/test.zep b/tests/examplefiles/test.zep deleted file mode 100644 index 4724d4c4..00000000 --- a/tests/examplefiles/test.zep +++ /dev/null @@ -1,33 +0,0 @@ -namespace Test; - -use Test\Foo; - -class Bar -{ - protected a; - private b; - public c {set, get}; - - public function __construct(string str, boolean bool) - { - let this->c = str; - this->setC(bool); - let this->b = []; - } - - public function sayHello(string name) - { - echo "Hello " . name; - } - - protected function loops() - { - for a in b { - echo a; - } - loop { - return "boo!"; - } - } - -} \ No newline at end of file diff --git a/tests/examplefiles/test2.odin b/tests/examplefiles/test2.odin deleted file mode 100644 index 2a6b4517..00000000 --- a/tests/examplefiles/test2.odin +++ /dev/null @@ -1,30 +0,0 @@ -school_schedule = < - lesson_times = <08:30:00, 09:30:00, 10:30:00, ...> - - locations = < - [1] = <"under the big plane tree"> - [2] = <"under the north arch"> - [3] = <"in a garden"> - > - - subjects = < - ["philosophy:plato"] = < -- note construction of key - name = <"philosophy"> - teacher = <"plato"> - topics = <"meta-physics", "natural science"> - weighting = <76%> - > - ["philosophy:kant"] = < - name = <"philosophy"> - teacher = <"kant"> - topics = <"meaning and reason", "meta-physics", "ethics"> - weighting = <80%> - > - ["art"] = < - name = <"art"> - teacher = <"goya"> - topics = <"technique", "portraiture", "satire"> - weighting = <78%> - > - > -> diff --git a/tests/examplefiles/test2.pypylog b/tests/examplefiles/test2.pypylog deleted file mode 100644 index 543e21dd..00000000 --- a/tests/examplefiles/test2.pypylog +++ /dev/null @@ -1,120 +0,0 @@ -[2f1dd6c3b8b7] {jit-log-opt-loop -# Loop 0 ( ds1dr4 dsdr3 ds1dr4) : loop with 115 ops -[p0, p1] -+33: label(p0, p1, descr=TargetToken(-1223434224)) -debug_merge_point(0, 0, ' ds1dr4 dsdr3 ds1dr4') -+33: guard_nonnull_class(p1, 138371488, descr=) [p1, p0] -+54: p3 = getfield_gc_pure(p1, descr=) -+57: guard_value(p3, ConstPtr(ptr4), descr=) [p1, p0, p3] -+69: p5 = getfield_gc_pure(p1, descr=) -+72: p7 = getarrayitem_gc(p5, 0, descr=) -+75: guard_class(p7, 138371552, descr=) [p0, p5, p7] -+88: p9 = getfield_gc(p7, descr=) -+91: guard_nonnull_class(p9, 138373024, descr=) [p0, p5, p7, p9] -+109: p12 = getarrayitem_gc(p5, 1, descr=) -+112: guard_class(p12, 138371552, descr=) [p0, p5, p12, p7] -+125: p14 = getfield_gc(p12, descr=) -+128: guard_nonnull_class(p14, 138373024, descr=) [p0, p5, p12, p14, p7] -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -+146: p16 = getfield_gc_pure(p9, descr=) -+149: guard_value(p16, ConstPtr(ptr17), descr=) [p16, p9, p0, p12, p7] -+161: p18 = getfield_gc_pure(p9, descr=) -+164: guard_class(p18, 138371648, descr=) [p18, p9, p0, p12, p7] -+177: p20 = getfield_gc_pure(p9, descr=) -+180: guard_class(p20, 138371648, descr=) [p20, p9, p18, p0, p12, p7] -+193: p22 = getfield_gc_pure(p9, descr=) -+196: guard_class(p22, 138371936, descr=) [p22, p9, p20, p18, p0, p12, p7] -debug_merge_point(0, 0, 'None') -+209: p24 = getfield_gc_pure(p22, descr=) -+215: guard_value(p24, ConstPtr(ptr25), descr=) [p24, p22, p9, None, None, p0, p12, p7] -+227: p27 = getfield_gc_pure(p22, descr=) -+230: guard_class(p27, 138371648, descr=) [p22, p27, p9, None, None, p0, p12, p7] 
-debug_merge_point(0, 0, '_') -debug_merge_point(0, 0, 'None') -+243: p30 = getfield_gc(ConstPtr(ptr29), descr=) -+249: i34 = call(ConstClass(ll_dict_lookup_trampoline__v64___simple_call__function_ll), p30, ConstPtr(ptr32), 360200661, descr=) -+281: guard_no_exception(, descr=) [p27, p20, p18, i34, p30, None, None, None, p0, p12, p7] -+294: i36 = int_and(i34, -2147483648) -+302: i37 = int_is_true(i36) -guard_false(i37, descr=) [p27, p20, p18, i34, p30, None, None, None, p0, p12, p7] -+311: p38 = getfield_gc(p30, descr=) -+314: p39 = getinteriorfield_gc(p38, i34, descr=>) -+318: i40 = instance_ptr_eq(p18, p39) -guard_true(i40, descr=) [p27, p20, None, None, None, p0, p12, p7] -debug_merge_point(0, 0, 'None') -+327: i41 = getfield_gc_pure(p20, descr=) -+330: i42 = getfield_gc_pure(p27, descr=) -+333: i43 = int_sub(i41, i42) -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -+335: i45 = int_eq(0, i43) -guard_false(i45, descr=) [p0, i43, None, None, None, None, p12, p7] -p47 = new_with_vtable(138371648) -+393: setfield_gc(p47, i43, descr=) -setfield_gc(p7, p47, descr=) -+414: p48 = getfield_gc(p12, descr=) -+420: guard_nonnull_class(p48, 138371648, descr=) [p0, p48, p12, p47, p7] -debug_merge_point(0, 0, ' 1 1 1 dsdr3 1') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, '_') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, ' dsdr3 dsdr3') -debug_merge_point(0, 0, ' ds1dr4 dsdr3 ds1dr4') -+438: label(p0, p48, p30, p38, descr=TargetToken(-1223434176)) -debug_merge_point(0, 0, ' ds1dr4 dsdr3 ds1dr4') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, '_') -debug_merge_point(0, 0, 'None') -+438: i50 = call(ConstClass(ll_dict_lookup_trampoline__v64___simple_call__function_ll), p30, ConstPtr(ptr32), 360200661, descr=) -+464: guard_no_exception(, descr=) [p48, i50, p30, p0] -+477: i51 = int_and(i50, -2147483648) -+485: i52 = int_is_true(i51) -guard_false(i52, descr=) [p48, i50, p30, p0] -+494: p53 = getinteriorfield_gc(p38, i50, descr=>) -+501: i55 = instance_ptr_eq(ConstPtr(ptr54), p53) -guard_true(i55, descr=) [p48, p0] -debug_merge_point(0, 0, 'None') -+513: i56 = getfield_gc_pure(p48, descr=) -+516: i58 = int_sub(i56, 1) -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -+519: i59 = int_eq(0, i58) -guard_false(i59, descr=) [i58, p48, p0] -debug_merge_point(0, 0, ' 1 1 1 dsdr3 1') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, '_') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, 'None') -debug_merge_point(0, 0, ' dsdr3 dsdr3') -debug_merge_point(0, 0, ' ds1dr4 dsdr3 ds1dr4') -p61 = new_with_vtable(138371700) -p63 = new_with_vtable(138373024) -p65 = new_with_vtable(138371936) -+606: setfield_gc(p63, ConstPtr(ptr66), descr=) -p68 = new_with_vtable(138373024) -+632: setfield_gc(p65, ConstPtr(ptr69), descr=) -p71 = new_with_vtable(138371936) -+658: setfield_gc(p68, ConstPtr(ptr17), descr=) -+665: setfield_gc(p71, ConstPtr(ptr72), descr=) -+672: setfield_gc(p68, p71, descr=) -+675: setfield_gc(p68, p48, descr=) -+678: setfield_gc(p68, ConstPtr(ptr54), descr=) -p73 = new_with_vtable(138371648) -+701: setfield_gc(p61, p0, descr=) -+716: setfield_gc(p61, 2, descr=) -+723: setfield_gc(p71, ConstPtr(ptr25), descr=) -+730: setfield_gc(p65, p68, descr=) -+733: setfield_gc(p63, p65, descr=) -+736: setfield_gc(p63, ConstPtr(ptr75), descr=) -+743: setfield_gc(p63, 
ConstPtr(ptr54), descr=) -+750: setfield_gc(p61, p63, descr=) -+753: setfield_gc(p73, i58, descr=) -+762: jump(p61, p73, p30, p38, descr=TargetToken(-1223434176)) -+775: --end of the loop-- -[2f1dd6da3b99] jit-log-opt-loop} diff --git a/tests/examplefiles/test_basic.adls b/tests/examplefiles/test_basic.adls deleted file mode 100644 index df5aa743..00000000 --- a/tests/examplefiles/test_basic.adls +++ /dev/null @@ -1,28 +0,0 @@ --- --- Example of an openEHR Archetype, written in the Archetype Definition Language (ADL) --- Definition available here: http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf --- Author: derived from the openEHR-EHR-EVALUATION.adverse_reaction.v1 archetype at http://www.openEHR.org/ckm --- - -archetype (adl_version=2.0.5; rm_release=1.0.2; generated) - openEHR-EHR-EVALUATION.adverse_reaction.v1.0.0 - -language - original_language = <[ISO_639-1::en]> - -description - lifecycle_state = <"unmanaged"> - -definition - EVALUATION[id1] - -terminology - term_definitions = < - ["en"] = < - ["id1"] = < - text = <"Adverse Reaction"> - description = <"xxx"> - > - > - > - diff --git a/tests/examplefiles/truncated.pytb b/tests/examplefiles/truncated.pytb deleted file mode 100644 index ad5b6d49..00000000 --- a/tests/examplefiles/truncated.pytb +++ /dev/null @@ -1,15 +0,0 @@ - File "/usr/lib/python2.3/site-packages/trac/web/main.py", line 314, in dispatch_request - dispatcher.dispatch(req) - File "/usr/lib/python2.3/site-packages/trac/web/main.py", line 186, in dispatch - req.session = Session(self.env, req) - File "/usr/lib/python2.3/site-packages/trac/web/session.py", line 52, in __init__ - self.promote_session(sid) - File "/usr/lib/python2.3/site-packages/trac/web/session.py", line 125, in promote_session - "AND authenticated=0", (sid,)) - File "/usr/lib/python2.3/site-packages/trac/db/util.py", line 47, in execute - return self.cursor.execute(sql_escape_percent(sql), args) - File "/usr/lib/python2.3/site-packages/trac/db/sqlite_backend.py", line 44, in execute - args or []) - File "/usr/lib/python2.3/site-packages/trac/db/sqlite_backend.py", line 36, in _rollback_on_error - return function(self, *args, **kwargs) -OperationalError: database is locked diff --git a/tests/examplefiles/tsql_example.sql b/tests/examplefiles/tsql_example.sql deleted file mode 100644 index cbd76091..00000000 --- a/tests/examplefiles/tsql_example.sql +++ /dev/null @@ -1,72 +0,0 @@ --- Example Transact-SQL file. - --- Single line comment -/* A comment - * spawning two lines. */ - /* An indented comment - * spawning multiple - * lines. */ -/* A /* nested */ comment. */ - -select - left(emp.firstname, 1) + '.' + [emp.surname] as "Name", - dep.name as [Department] -into - #temp_employee -from - employee as emp - inner join department as dep on - dep.ident_code = emp.department_id -where - emp.date_of_birth >= '1990-01-01'; -go - -declare @TextToFind nvarchar(100) = N'some -text across -multiple lines'; - -set @TextToFind varchar(32) = 'hello' + ' world'; -set @TextTiFind += '!'; - -declare @Count int = 17 * (3 - 5); - -delete from - [server].[database].[schema].[table] -where - [Text] = @TextToFind and author Not LIKE '%some%'; - -goto overthere; -overthere: - -select - 123 as "int 1", - +123 as "int 2", - -123 as "int 3", - 0x20 as "hex int", - 123.45 as "float 1", - -1.23e45 as "float 2" - +1.23E+45 as "float 3", - -1.23e-45 as "float 4", - 1. 
as "float 5", - .1 as "float 6", - 1.e2 as "float 7", - .1e2 as "float 8"; - -Select @@Error, $PARTITion.RangePF1(10); - -select top 3 Ähnliches from Müll; - --- Example transaction -BEGIN TRAN - -BEGIN TRY - INSERT INTO #temp_employe(Name, Department) VALUES ('L. Miller', 'Sales') - iNsErT inTO #temp_employe(Name, Department) VaLuEs ('M. Webster', 'Helpdesk') - COMMIT TRAN -END TRY -BEGIN CATCH - print 'cannot perform transaction; rolling back'; - ROLLBACK TRAN -END CATCH - --- Comment at end without newline. \ No newline at end of file diff --git a/tests/examplefiles/twig_test b/tests/examplefiles/twig_test deleted file mode 100644 index 0932fe90..00000000 --- a/tests/examplefiles/twig_test +++ /dev/null @@ -1,4612 +0,0 @@ -From the Twig test suite, https://github.com/fabpot/Twig, available under BSD license. - ---TEST-- -Exception for an unclosed tag ---TEMPLATE-- -{% block foo %} - {% if foo %} - - - - - {% for i in fo %} - - - - {% endfor %} - - - -{% endblock %} ---EXCEPTION-- -Twig_Error_Syntax: Unexpected tag name "endblock" (expecting closing tag for the "if" tag defined near line 4) in "index.twig" at line 16 ---TEST-- -Exception for an undefined trait ---TEMPLATE-- -{% use 'foo' with foobar as bar %} ---TEMPLATE(foo)-- -{% block bar %} -{% endblock %} ---EXCEPTION-- -Twig_Error_Runtime: Block "foobar" is not defined in trait "foo" in "index.twig". ---TEST-- -Twig supports method calls ---TEMPLATE-- -{{ items.foo }} -{{ items['foo'] }} -{{ items[foo] }} -{{ items[items[foo]] }} ---DATA-- -return array('foo' => 'bar', 'items' => array('foo' => 'bar', 'bar' => 'foo')) ---EXPECT-- -bar -bar -foo -bar ---TEST-- -Twig supports array notation ---TEMPLATE-- -{# empty array #} -{{ []|join(',') }} - -{{ [1, 2]|join(',') }} -{{ ['foo', "bar"]|join(',') }} -{{ {0: 1, 'foo': 'bar'}|join(',') }} -{{ {0: 1, 'foo': 'bar'}|keys|join(',') }} - -{{ {0: 1, foo: 'bar'}|join(',') }} -{{ {0: 1, foo: 'bar'}|keys|join(',') }} - -{# nested arrays #} -{% set a = [1, 2, [1, 2], {'foo': {'foo': 'bar'}}] %} -{{ a[2]|join(',') }} -{{ a[3]["foo"]|join(',') }} - -{# works even if [] is used inside the array #} -{{ [foo[bar]]|join(',') }} - -{# elements can be any expression #} -{{ ['foo'|upper, bar|upper, bar == foo]|join(',') }} - -{# arrays can have a trailing , like in PHP #} -{{ - [ - 1, - 2, - ]|join(',') -}} - -{# keys can be any expression #} -{% set a = 1 %} -{% set b = "foo" %} -{% set ary = { (a): 'a', (b): 'b', 'c': 'c', (a ~ b): 'd' } %} -{{ ary|keys|join(',') }} -{{ ary|join(',') }} ---DATA-- -return array('bar' => 'bar', 'foo' => array('bar' => 'bar')) ---EXPECT-- -1,2 -foo,bar -1,bar -0,foo - -1,bar -0,foo - -1,2 -bar - -bar - -FOO,BAR, - -1,2 - -1,foo,c,1foo -a,b,c,d ---TEST-- -Twig supports binary operations (+, -, *, /, ~, %, and, or) ---TEMPLATE-- -{{ 1 + 1 }} -{{ 2 - 1 }} -{{ 2 * 2 }} -{{ 2 / 2 }} -{{ 3 % 2 }} -{{ 1 and 1 }} -{{ 1 and 0 }} -{{ 0 and 1 }} -{{ 0 and 0 }} -{{ 1 or 1 }} -{{ 1 or 0 }} -{{ 0 or 1 }} -{{ 0 or 0 }} -{{ 0 or 1 and 0 }} -{{ 1 or 0 and 1 }} -{{ "foo" ~ "bar" }} -{{ foo ~ "bar" }} -{{ "foo" ~ bar }} -{{ foo ~ bar }} -{{ 20 // 7 }} ---DATA-- -return array('foo' => 'bar', 'bar' => 'foo') ---EXPECT-- -2 -1 -4 -1 -1 -1 - - - -1 -1 -1 - - -1 -foobar -barbar -foofoo -barfoo -2 ---TEST-- -Twig supports bitwise operations ---TEMPLATE-- -{{ 1 b-and 5 }} -{{ 1 b-or 5 }} -{{ 1 b-xor 5 }} -{{ (1 and 0 b-or 0) is same as(1 and (0 b-or 0)) ? 
'ok' : 'ko' }} ---DATA-- -return array() ---EXPECT-- -1 -5 -4 -ok ---TEST-- -Twig supports comparison operators (==, !=, <, >, >=, <=) ---TEMPLATE-- -{{ 1 > 2 }}/{{ 1 > 1 }}/{{ 1 >= 2 }}/{{ 1 >= 1 }} -{{ 1 < 2 }}/{{ 1 < 1 }}/{{ 1 <= 2 }}/{{ 1 <= 1 }} -{{ 1 == 1 }}/{{ 1 == 2 }} -{{ 1 != 1 }}/{{ 1 != 2 }} ---DATA-- -return array() ---EXPECT-- -///1 -1//1/1 -1/ -/1 ---TEST-- -Twig supports the "divisible by" operator ---TEMPLATE-- -{{ 8 is divisible by(2) ? 'OK' }} -{{ 8 is not divisible by(3) ? 'OK' }} -{{ 8 is divisible by (2) ? 'OK' }} -{{ 8 is not - divisible - by - (3) ? 'OK' }} ---DATA-- -return array() ---EXPECT-- -OK -OK -OK -OK ---TEST-- -Twig supports the .. operator ---TEMPLATE-- -{% for i in 0..10 %}{{ i }} {% endfor %} - -{% for letter in 'a'..'z' %}{{ letter }} {% endfor %} - -{% for letter in 'a'|upper..'z'|upper %}{{ letter }} {% endfor %} - -{% for i in foo[0]..foo[1] %}{{ i }} {% endfor %} - -{% for i in 0 + 1 .. 10 - 1 %}{{ i }} {% endfor %} ---DATA-- -return array('foo' => array(1, 10)) ---EXPECT-- -0 1 2 3 4 5 6 7 8 9 10 -a b c d e f g h i j k l m n o p q r s t u v w x y z -A B C D E F G H I J K L M N O P Q R S T U V W X Y Z -1 2 3 4 5 6 7 8 9 10 -1 2 3 4 5 6 7 8 9 ---TEST-- -Twig supports the "ends with" operator ---TEMPLATE-- -{{ 'foo' ends with 'o' ? 'OK' : 'KO' }} -{{ not ('foo' ends with 'f') ? 'OK' : 'KO' }} -{{ not ('foo' ends with 'foowaytoolong') ? 'OK' : 'KO' }} -{{ 'foo' ends with '' ? 'OK' : 'KO' }} -{{ '1' ends with true ? 'OK' : 'KO' }} -{{ 1 ends with true ? 'OK' : 'KO' }} -{{ 0 ends with false ? 'OK' : 'KO' }} -{{ '' ends with false ? 'OK' : 'KO' }} -{{ false ends with false ? 'OK' : 'KO' }} -{{ false ends with '' ? 'OK' : 'KO' }} ---DATA-- -return array() ---EXPECT-- -OK -OK -OK -OK -KO -KO -KO -KO -KO -KO ---TEST-- -Twig supports grouping of expressions ---TEMPLATE-- -{{ (2 + 2) / 2 }} ---DATA-- -return array() ---EXPECT-- -2 ---TEST-- -Twig supports literals ---TEMPLATE-- -1 {{ true }} -2 {{ TRUE }} -3 {{ false }} -4 {{ FALSE }} -5 {{ none }} -6 {{ NONE }} -7 {{ null }} -8 {{ NULL }} ---DATA-- -return array() ---EXPECT-- -1 1 -2 1 -3 -4 -5 -6 -7 -8 ---TEST-- -Twig supports __call() for attributes ---TEMPLATE-- -{{ foo.foo }} -{{ foo.bar }} ---EXPECT-- -foo_from_call -bar_from_getbar ---TEST-- -Twig supports the "matches" operator ---TEMPLATE-- -{{ 'foo' matches '/o/' ? 'OK' : 'KO' }} -{{ 'foo' matches '/^fo/' ? 'OK' : 'KO' }} -{{ 'foo' matches '/O/i' ? 
'OK' : 'KO' }} ---DATA-- -return array() ---EXPECT-- -OK -OK -OK ---TEST-- -Twig supports method calls ---TEMPLATE-- -{{ items.foo.foo }} -{{ items.foo.getFoo() }} -{{ items.foo.bar }} -{{ items.foo['bar'] }} -{{ items.foo.bar('a', 43) }} -{{ items.foo.bar(foo) }} -{{ items.foo.self.foo() }} -{{ items.foo.is }} -{{ items.foo.in }} -{{ items.foo.not }} ---DATA-- -return array('foo' => 'bar', 'items' => array('foo' => new TwigTestFoo(), 'bar' => 'foo')) ---CONFIG-- -return array('strict_variables' => false) ---EXPECT-- -foo -foo -bar - -bar_a-43 -bar_bar -foo -is -in -not ---TEST-- -Twig allows to use named operators as variable names ---TEMPLATE-- -{% for match in matches %} - {{- match }} -{% endfor %} -{{ in }} -{{ is }} ---DATA-- -return array('matches' => array(1, 2, 3), 'in' => 'in', 'is' => 'is') ---EXPECT-- -1 -2 -3 -in -is ---TEST-- -Twig parses postfix expressions ---TEMPLATE-- -{% import _self as macros %} - -{% macro foo() %}foo{% endmacro %} - -{{ 'a' }} -{{ 'a'|upper }} -{{ ('a')|upper }} -{{ -1|upper }} -{{ macros.foo() }} -{{ (macros).foo() }} ---DATA-- -return array(); ---EXPECT-- -a -A -A --1 -foo -foo ---TEST-- -Twig supports the "same as" operator ---TEMPLATE-- -{{ 1 is same as(1) ? 'OK' }} -{{ 1 is not same as(true) ? 'OK' }} -{{ 1 is same as(1) ? 'OK' }} -{{ 1 is not same as(true) ? 'OK' }} -{{ 1 is same as (1) ? 'OK' }} -{{ 1 is not - same - as - (true) ? 'OK' }} ---DATA-- -return array() ---EXPECT-- -OK -OK -OK -OK -OK -OK ---TEST-- -Twig supports the "starts with" operator ---TEMPLATE-- -{{ 'foo' starts with 'f' ? 'OK' : 'KO' }} -{{ not ('foo' starts with 'oo') ? 'OK' : 'KO' }} -{{ not ('foo' starts with 'foowaytoolong') ? 'OK' : 'KO' }} -{{ 'foo' starts with 'f' ? 'OK' : 'KO' }} -{{ 'foo' starts -with 'f' ? 'OK' : 'KO' }} -{{ 'foo' starts with '' ? 'OK' : 'KO' }} -{{ '1' starts with true ? 'OK' : 'KO' }} -{{ '' starts with false ? 'OK' : 'KO' }} -{{ 'a' starts with false ? 'OK' : 'KO' }} -{{ false starts with '' ? 'OK' : 'KO' }} ---DATA-- -return array() ---EXPECT-- -OK -OK -OK -OK -OK -OK -KO -KO -KO -KO ---TEST-- -Twig supports string interpolation ---TEMPLATE-- -{# "foo #{"foo #{bar} baz"} baz" #} -{# "foo #{bar}#{bar} baz" #} ---DATA-- -return array('bar' => 'BAR'); ---EXPECT-- -foo foo BAR baz baz -foo BARBAR baz ---TEST-- -Twig supports the ternary operator ---TEMPLATE-- -{{ 1 ? 'YES' }} -{{ 0 ? 'YES' }} ---DATA-- -return array() ---EXPECT-- -YES - ---TEST-- -Twig supports the ternary operator ---TEMPLATE-- -{{ 'YES' ?: 'NO' }} -{{ 0 ?: 'NO' }} ---DATA-- -return array() ---EXPECT-- -YES -NO ---TEST-- -Twig supports the ternary operator ---TEMPLATE-- -{{ 1 ? 'YES' : 'NO' }} -{{ 0 ? 'YES' : 'NO' }} -{{ 0 ? 'YES' : (1 ? 'YES1' : 'NO1') }} -{{ 0 ? 'YES' : (0 ? 'YES1' : 'NO1') }} -{{ 1 == 1 ? 'foo
    ':'' }} -{{ foo ~ (bar ? ('-' ~ bar) : '') }} ---DATA-- -return array('foo' => 'foo', 'bar' => 'bar') ---EXPECT-- -YES -NO -YES1 -NO1 -foo
    -foo-bar ---TEST-- -Twig does not allow to use two-word named operators as variable names ---TEMPLATE-- -{{ starts with }} ---DATA-- -return array() ---EXCEPTION-- -Twig_Error_Syntax: Unexpected token "operator" of value "starts with" in "index.twig" at line 2 ---TEST-- -Twig unary operators precedence ---TEMPLATE-- -{{ -1 - 1 }} -{{ -1 - -1 }} -{{ -1 * -1 }} -{{ 4 / -1 * 5 }} ---DATA-- -return array() ---EXPECT-- --2 -0 -1 --20 ---TEST-- -Twig supports unary operators (not, -, +) ---TEMPLATE-- -{{ not 1 }}/{{ not 0 }} -{{ +1 + 1 }}/{{ -1 - 1 }} -{{ not (false or true) }} ---DATA-- -return array() ---EXPECT-- -/1 -2/-2 - ---TEST-- -"abs" filter ---TEMPLATE-- -{{ (-5.5)|abs }} -{{ (-5)|abs }} -{{ (-0)|abs }} -{{ 0|abs }} -{{ 5|abs }} -{{ 5.5|abs }} -{{ number1|abs }} -{{ number2|abs }} -{{ number3|abs }} -{{ number4|abs }} -{{ number5|abs }} -{{ number6|abs }} ---DATA-- -return array('number1' => -5.5, 'number2' => -5, 'number3' => -0, 'number4' => 0, 'number5' => 5, 'number6' => 5.5) ---EXPECT-- -5.5 -5 -0 -0 -5 -5.5 -5.5 -5 -0 -0 -5 -5.5 ---TEST-- -"batch" filter ---TEMPLATE-- -{% for row in items|batch(3.1) %} -
    - {% for column in row %} -
    {{ column }}
    - {% endfor %} -
    -{% endfor %} ---DATA-- -return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) ---EXPECT-- -
    -
    a
    -
    b
    -
    c
    -
    d
    -
    -
    -
    e
    -
    f
    -
    g
    -
    h
    -
    -
    -
    i
    -
    j
    -
    ---TEST-- -"batch" filter ---TEMPLATE-- -{% for row in items|batch(3) %} -
    - {% for column in row %} -
    {{ column }}
    - {% endfor %} -
    -{% endfor %} ---DATA-- -return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) ---EXPECT-- -
    -
    a
    -
    b
    -
    c
    -
    -
    -
    d
    -
    e
    -
    f
    -
    -
    -
    g
    -
    h
    -
    i
    -
    -
    -
    j
    -
    ---TEST-- -"batch" filter ---TEMPLATE-- - -{% for row in items|batch(3, '') %} - - {% for column in row %} - - {% endfor %} - -{% endfor %} -
    {{ column }}
    ---DATA-- -return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) ---EXPECT-- - - - - - - - - - - - - - - - - - - - - - -
    abc
    def
    ghi
    j
    ---TEST-- -"batch" filter ---TEMPLATE-- -{% for row in items|batch(3, 'fill') %} -
    - {% for column in row %} -
    {{ column }}
    - {% endfor %} -
    -{% endfor %} ---DATA-- -return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l')) ---EXPECT-- -
    -
    a
    -
    b
    -
    c
    -
    -
    -
    d
    -
    e
    -
    f
    -
    -
    -
    g
    -
    h
    -
    i
    -
    -
    -
    j
    -
    k
    -
    l
    -
    ---TEST-- -"batch" filter ---TEMPLATE-- - -{% for row in items|batch(3, 'fill') %} - - {% for column in row %} - - {% endfor %} - -{% endfor %} -
    {{ column }}
    ---DATA-- -return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')) ---EXPECT-- - - - - - - - - - - - - - - - - - - - - - -
    abc
    def
    ghi
    jfillfill
    ---TEST-- -"convert_encoding" filter ---CONDITION-- -function_exists('iconv') || function_exists('mb_convert_encoding') ---TEMPLATE-- -{{ "愛していますか?"|convert_encoding('ISO-2022-JP', 'UTF-8')|convert_encoding('UTF-8', 'ISO-2022-JP') }} ---DATA-- -return array() ---EXPECT-- -愛していますか? ---TEST-- -"date" filter (interval support as of PHP 5.3) ---CONDITION-- -version_compare(phpversion(), '5.3.0', '>=') ---TEMPLATE-- -{{ date2|date }} -{{ date2|date('%d days') }} ---DATA-- -date_default_timezone_set('UTC'); -$twig->getExtension('core')->setDateFormat('Y-m-d', '%d days %h hours'); -return array( - 'date2' => new DateInterval('P2D'), -) ---EXPECT-- -2 days 0 hours -2 days ---TEST-- -"date" filter ---TEMPLATE-- -{{ date1|date }} -{{ date1|date('d/m/Y') }} ---DATA-- -date_default_timezone_set('UTC'); -$twig->getExtension('core')->setDateFormat('Y-m-d', '%d days %h hours'); -return array( - 'date1' => mktime(13, 45, 0, 10, 4, 2010), -) ---EXPECT-- -2010-10-04 -04/10/2010 ---TEST-- -"date" filter ---CONDITION-- -version_compare(phpversion(), '5.5.0', '>=') ---TEMPLATE-- -{{ date1|date }} -{{ date1|date('d/m/Y') }} -{{ date1|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }} -{{ date1|date('d/m/Y H:i:s', timezone1) }} -{{ date1|date('d/m/Y H:i:s') }} - -{{ date2|date('d/m/Y H:i:s P', 'Europe/Paris') }} -{{ date2|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }} -{{ date2|date('d/m/Y H:i:s P', false) }} -{{ date2|date('e', 'Europe/Paris') }} -{{ date2|date('e', false) }} ---DATA-- -date_default_timezone_set('Europe/Paris'); -return array( - 'date1' => new DateTimeImmutable('2010-10-04 13:45'), - 'date2' => new DateTimeImmutable('2010-10-04 13:45', new DateTimeZone('America/New_York')), - 'timezone1' => new DateTimeZone('America/New_York'), -) ---EXPECT-- -October 4, 2010 13:45 -04/10/2010 -04/10/2010 19:45:00 -04/10/2010 07:45:00 -04/10/2010 13:45:00 - -04/10/2010 19:45:00 +02:00 -05/10/2010 01:45:00 +08:00 -04/10/2010 13:45:00 -04:00 -Europe/Paris -America/New_York ---TEST-- -"date" filter (interval support as of PHP 5.3) ---CONDITION-- -version_compare(phpversion(), '5.3.0', '>=') ---TEMPLATE-- -{{ date1|date }} -{{ date1|date('%d days %h hours') }} -{{ date1|date('%d days %h hours', timezone1) }} ---DATA-- -date_default_timezone_set('UTC'); -return array( - 'date1' => new DateInterval('P2D'), - // This should have no effect on DateInterval formatting - 'timezone1' => new DateTimeZone('America/New_York'), -) ---EXPECT-- -2 days -2 days 0 hours -2 days 0 hours ---TEST-- -"date_modify" filter ---TEMPLATE-- -{{ date1|date_modify('-1day')|date('Y-m-d H:i:s') }} -{{ date2|date_modify('-1day')|date('Y-m-d H:i:s') }} ---DATA-- -date_default_timezone_set('UTC'); -return array( - 'date1' => '2010-10-04 13:45', - 'date2' => new DateTime('2010-10-04 13:45'), -) ---EXPECT-- -2010-10-03 13:45:00 -2010-10-03 13:45:00 ---TEST-- -"date" filter ---TEMPLATE-- -{{ date|date(format='d/m/Y H:i:s P', timezone='America/Chicago') }} -{{ date|date(timezone='America/Chicago', format='d/m/Y H:i:s P') }} -{{ date|date('d/m/Y H:i:s P', timezone='America/Chicago') }} ---DATA-- -date_default_timezone_set('UTC'); -return array('date' => mktime(13, 45, 0, 10, 4, 2010)) ---EXPECT-- -04/10/2010 08:45:00 -05:00 -04/10/2010 08:45:00 -05:00 -04/10/2010 08:45:00 -05:00 ---TEST-- -"date" filter ---TEMPLATE-- -{{ date1|date }} -{{ date1|date('d/m/Y') }} -{{ date1|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }} -{{ date1|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }} -{{ date1|date('d/m/Y H:i:s P', 'America/Chicago') }} -{{ date1|date('e') }} -{{ date1|date('d/m/Y 
H:i:s') }} - -{{ date2|date }} -{{ date2|date('d/m/Y') }} -{{ date2|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }} -{{ date2|date('d/m/Y H:i:s', timezone1) }} -{{ date2|date('d/m/Y H:i:s') }} - -{{ date3|date }} -{{ date3|date('d/m/Y') }} - -{{ date4|date }} -{{ date4|date('d/m/Y') }} - -{{ date5|date }} -{{ date5|date('d/m/Y') }} - -{{ date6|date('d/m/Y H:i:s P', 'Europe/Paris') }} -{{ date6|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }} -{{ date6|date('d/m/Y H:i:s P', false) }} -{{ date6|date('e', 'Europe/Paris') }} -{{ date6|date('e', false) }} - -{{ date7|date }} ---DATA-- -date_default_timezone_set('Europe/Paris'); -return array( - 'date1' => mktime(13, 45, 0, 10, 4, 2010), - 'date2' => new DateTime('2010-10-04 13:45'), - 'date3' => '2010-10-04 13:45', - 'date4' => 1286199900, // DateTime::createFromFormat('Y-m-d H:i', '2010-10-04 13:45', new DateTimeZone('UTC'))->getTimestamp() -- A unixtimestamp is always GMT - 'date5' => -189291360, // DateTime::createFromFormat('Y-m-d H:i', '1964-01-02 03:04', new DateTimeZone('UTC'))->getTimestamp(), - 'date6' => new DateTime('2010-10-04 13:45', new DateTimeZone('America/New_York')), - 'date7' => '2010-01-28T15:00:00+05:00', - 'timezone1' => new DateTimeZone('America/New_York'), -) ---EXPECT-- -October 4, 2010 13:45 -04/10/2010 -04/10/2010 19:45:00 -04/10/2010 19:45:00 +08:00 -04/10/2010 06:45:00 -05:00 -Europe/Paris -04/10/2010 13:45:00 - -October 4, 2010 13:45 -04/10/2010 -04/10/2010 19:45:00 -04/10/2010 07:45:00 -04/10/2010 13:45:00 - -October 4, 2010 13:45 -04/10/2010 - -October 4, 2010 15:45 -04/10/2010 - -January 2, 1964 04:04 -02/01/1964 - -04/10/2010 19:45:00 +02:00 -05/10/2010 01:45:00 +08:00 -04/10/2010 13:45:00 -04:00 -Europe/Paris -America/New_York - -January 28, 2010 11:00 ---TEST-- -"default" filter ---TEMPLATE-- -Variable: -{{ definedVar |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ zeroVar |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ emptyVar |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ nullVar |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ undefinedVar |default('default') is same as('default') ? 'ok' : 'ko' }} -Array access: -{{ nested.definedVar |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ nested['definedVar'] |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ nested.zeroVar |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ nested.emptyVar |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ nested.nullVar |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ nested.undefinedVar |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ nested['undefinedVar'] |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ undefinedVar.foo |default('default') is same as('default') ? 'ok' : 'ko' }} -Plain values: -{{ 'defined' |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ 0 |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ '' |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ null |default('default') is same as('default') ? 'ok' : 'ko' }} -Precedence: -{{ 'o' ~ nullVar |default('k') }} -{{ 'o' ~ nested.nullVar |default('k') }} -Object methods: -{{ object.foo |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ object.undefinedMethod |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ object.getFoo() |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ object.getFoo('a') |default('default') is same as('default') ? 
'ko' : 'ok' }} -{{ object.undefinedMethod() |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ object.undefinedMethod('a') |default('default') is same as('default') ? 'ok' : 'ko' }} -Deep nested: -{{ nested.undefinedVar.foo.bar |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ nested.definedArray.0 |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ nested['definedArray'][0] |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ object.self.foo |default('default') is same as('default') ? 'ko' : 'ok' }} -{{ object.self.undefinedMethod |default('default') is same as('default') ? 'ok' : 'ko' }} -{{ object.undefinedMethod.self |default('default') is same as('default') ? 'ok' : 'ko' }} ---DATA-- -return array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'emptyVar' => '', - 'nullVar' => null, - 'nested' => array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'emptyVar' => '', - 'nullVar' => null, - 'definedArray' => array(0), - ), - 'object' => new TwigTestFoo(), -) ---CONFIG-- -return array('strict_variables' => false) ---EXPECT-- -Variable: -ok -ok -ok -ok -ok -Array access: -ok -ok -ok -ok -ok -ok -ok -ok -Plain values: -ok -ok -ok -ok -Precedence: -ok -ok -Object methods: -ok -ok -ok -ok -ok -ok -Deep nested: -ok -ok -ok -ok -ok -ok ---DATA-- -return array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'emptyVar' => '', - 'nullVar' => null, - 'nested' => array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'emptyVar' => '', - 'nullVar' => null, - 'definedArray' => array(0), - ), - 'object' => new TwigTestFoo(), -) ---CONFIG-- -return array('strict_variables' => true) ---EXPECT-- -Variable: -ok -ok -ok -ok -ok -Array access: -ok -ok -ok -ok -ok -ok -ok -ok -Plain values: -ok -ok -ok -ok -Precedence: -ok -ok -Object methods: -ok -ok -ok -ok -ok -ok -Deep nested: -ok -ok -ok -ok -ok -ok ---TEST-- -dynamic filter ---TEMPLATE-- -{{ 'bar'|foo_path }} -{{ 'bar'|a_foo_b_bar }} ---DATA-- -return array() ---EXPECT-- -foo/bar -a/b/bar ---TEST-- -"escape" filter does not escape with the html strategy when using the html_attr strategy ---TEMPLATE-- -{{ '
    '|escape('html_attr') }} ---DATA-- -return array() ---EXPECT-- -<br /> ---TEST-- -"escape" filter ---TEMPLATE-- -{{ "愛していますか?
    "|e }} ---DATA-- -return array() ---EXPECT-- -愛していますか? <br /> ---TEST-- -"escape" filter ---TEMPLATE-- -{{ "foo
    "|e }} ---DATA-- -return array() ---EXPECT-- -foo <br /> ---TEST-- -"first" filter ---TEMPLATE-- -{{ [1, 2, 3, 4]|first }} -{{ {a: 1, b: 2, c: 3, d: 4}|first }} -{{ '1234'|first }} -{{ arr|first }} -{{ 'Ä€é'|first }} -{{ ''|first }} ---DATA-- -return array('arr' => new ArrayObject(array(1, 2, 3, 4))) ---EXPECT-- -1 -1 -1 -1 -Ä ---TEST-- -"escape" filter ---TEMPLATE-- -{% set foo %} - foo
    -{% endset %} - -{{ foo|e('html') -}} -{{ foo|e('js') }} -{% autoescape true %} - {{ foo }} -{% endautoescape %} ---DATA-- -return array() ---EXPECT-- - foo<br /> -\x20\x20\x20\x20foo\x3Cbr\x20\x2F\x3E\x0A - foo
    ---TEST-- -"format" filter ---TEMPLATE-- -{{ string|format(foo, 3) }} ---DATA-- -return array('string' => '%s/%d', 'foo' => 'bar') ---EXPECT-- -bar/3 ---TEST-- -"join" filter ---TEMPLATE-- -{{ ["foo", "bar"]|join(', ') }} -{{ foo|join(', ') }} -{{ bar|join(', ') }} ---DATA-- -return array('foo' => new TwigTestFoo(), 'bar' => new ArrayObject(array(3, 4))) ---EXPECT-- -foo, bar -1, 2 -3, 4 ---TEST-- -"json_encode" filter ---TEMPLATE-- -{{ "foo"|json_encode|raw }} -{{ foo|json_encode|raw }} -{{ [foo, "foo"]|json_encode|raw }} ---DATA-- -return array('foo' => new Twig_Markup('foo', 'UTF-8')) ---EXPECT-- -"foo" -"foo" -["foo","foo"] ---TEST-- -"last" filter ---TEMPLATE-- -{{ [1, 2, 3, 4]|last }} -{{ {a: 1, b: 2, c: 3, d: 4}|last }} -{{ '1234'|last }} -{{ arr|last }} -{{ 'Ä€é'|last }} -{{ ''|last }} ---DATA-- -return array('arr' => new ArrayObject(array(1, 2, 3, 4))) ---EXPECT-- -4 -4 -4 -4 -é ---TEST-- -"length" filter ---TEMPLATE-- -{{ array|length }} -{{ string|length }} -{{ number|length }} -{{ markup|length }} ---DATA-- -return array('array' => array(1, 4), 'string' => 'foo', 'number' => 1000, 'markup' => new Twig_Markup('foo', 'UTF-8')) ---EXPECT-- -2 -3 -4 -3 ---TEST-- -"length" filter ---CONDITION-- -function_exists('mb_get_info') ---TEMPLATE-- -{{ string|length }} -{{ markup|length }} ---DATA-- -return array('string' => 'été', 'markup' => new Twig_Markup('foo', 'UTF-8')) ---EXPECT-- -3 -3 ---TEST-- -"merge" filter ---TEMPLATE-- -{{ items|merge({'bar': 'foo'})|join }} -{{ items|merge({'bar': 'foo'})|keys|join }} -{{ {'bar': 'foo'}|merge(items)|join }} -{{ {'bar': 'foo'}|merge(items)|keys|join }} -{{ numerics|merge([4, 5, 6])|join }} ---DATA-- -return array('items' => array('foo' => 'bar'), 'numerics' => array(1, 2, 3)) ---EXPECT-- -barfoo -foobar -foobar -barfoo -123456 ---TEST-- -"nl2br" filter ---TEMPLATE-- -{{ "I like Twig.\nYou will like it too.\n\nEverybody like it!"|nl2br }} -{{ text|nl2br }} ---DATA-- -return array('text' => "If you have some HTML\nit will be escaped.") ---EXPECT-- -I like Twig.
    -You will like it too.
    -
    -Everybody like it! -If you have some <strong>HTML</strong>
    -it will be escaped. ---TEST-- -"number_format" filter with defaults. ---TEMPLATE-- -{{ 20|number_format }} -{{ 20.25|number_format }} -{{ 20.25|number_format(1) }} -{{ 20.25|number_format(2, ',') }} -{{ 1020.25|number_format }} -{{ 1020.25|number_format(2, ',') }} -{{ 1020.25|number_format(2, ',', '.') }} ---DATA-- -$twig->getExtension('core')->setNumberFormat(2, '!', '='); -return array(); ---EXPECT-- -20!00 -20!25 -20!3 -20,25 -1=020!25 -1=020,25 -1.020,25 ---TEST-- -"number_format" filter ---TEMPLATE-- -{{ 20|number_format }} -{{ 20.25|number_format }} -{{ 20.25|number_format(2) }} -{{ 20.25|number_format(2, ',') }} -{{ 1020.25|number_format(2, ',') }} -{{ 1020.25|number_format(2, ',', '.') }} ---DATA-- -return array(); ---EXPECT-- -20 -20 -20.25 -20,25 -1,020,25 -1.020,25 ---TEST-- -"replace" filter ---TEMPLATE-- -{{ "I like %this% and %that%."|replace({'%this%': "foo", '%that%': "bar"}) }} ---DATA-- -return array() ---EXPECT-- -I like foo and bar. ---TEST-- -"reverse" filter ---TEMPLATE-- -{{ [1, 2, 3, 4]|reverse|join('') }} -{{ '1234évènement'|reverse }} -{{ arr|reverse|join('') }} -{{ {'a': 'c', 'b': 'a'}|reverse()|join(',') }} -{{ {'a': 'c', 'b': 'a'}|reverse(preserveKeys=true)|join(glue=',') }} -{{ {'a': 'c', 'b': 'a'}|reverse(preserve_keys=true)|join(glue=',') }} ---DATA-- -return array('arr' => new ArrayObject(array(1, 2, 3, 4))) ---EXPECT-- -4321 -tnemenèvé4321 -4321 -a,c -a,c -a,c ---TEST-- -"round" filter ---TEMPLATE-- -{{ 2.7|round }} -{{ 2.1|round }} -{{ 2.1234|round(3, 'floor') }} -{{ 2.1|round(0, 'ceil') }} - -{{ 21.3|round(-1)}} -{{ 21.3|round(-1, 'ceil')}} -{{ 21.3|round(-1, 'floor')}} ---DATA-- -return array() ---EXPECT-- -3 -2 -2.123 -3 - -20 -30 -20 ---TEST-- -"slice" filter ---TEMPLATE-- -{{ [1, 2, 3, 4][1:2]|join('') }} -{{ {a: 1, b: 2, c: 3, d: 4}[1:2]|join('') }} -{{ [1, 2, 3, 4][start:length]|join('') }} -{{ [1, 2, 3, 4]|slice(1, 2)|join('') }} -{{ [1, 2, 3, 4]|slice(1, 2)|keys|join('') }} -{{ [1, 2, 3, 4]|slice(1, 2, true)|keys|join('') }} -{{ {a: 1, b: 2, c: 3, d: 4}|slice(1, 2)|join('') }} -{{ {a: 1, b: 2, c: 3, d: 4}|slice(1, 2)|keys|join('') }} -{{ '1234'|slice(1, 2) }} -{{ '1234'[1:2] }} -{{ arr|slice(1, 2)|join('') }} -{{ arr[1:2]|join('') }} - -{{ [1, 2, 3, 4]|slice(1)|join('') }} -{{ [1, 2, 3, 4][1:]|join('') }} -{{ '1234'|slice(1) }} -{{ '1234'[1:] }} -{{ '1234'[:1] }} ---DATA-- -return array('start' => 1, 'length' => 2, 'arr' => new ArrayObject(array(1, 2, 3, 4))) ---EXPECT-- -23 -23 -23 -23 -01 -12 -23 -bc -23 -23 -23 -23 - -234 -234 -234 -234 -1 ---TEST-- -"sort" filter ---TEMPLATE-- -{{ array1|sort|join }} -{{ array2|sort|join }} ---DATA-- -return array('array1' => array(4, 1), 'array2' => array('foo', 'bar')) ---EXPECT-- -14 -barfoo ---TEST-- -"split" filter ---TEMPLATE-- -{{ "one,two,three,four,five"|split(',')|join('-') }} -{{ foo|split(',')|join('-') }} -{{ foo|split(',', 3)|join('-') }} -{{ baz|split('')|join('-') }} -{{ baz|split('', 2)|join('-') }} -{{ foo|split(',', -2)|join('-') }} ---DATA-- -return array('foo' => "one,two,three,four,five", 'baz' => '12345',) ---EXPECT-- -one-two-three-four-five -one-two-three-four-five -one-two-three,four,five -1-2-3-4-5 -12-34-5 -one-two-three--TEST-- -"trim" filter ---TEMPLATE-- -{{ " I like Twig. "|trim }} -{{ text|trim }} -{{ " foo/"|trim("/") }} ---DATA-- -return array('text' => " If you have some HTML it will be escaped. ") ---EXPECT-- -I like Twig. -If you have some <strong>HTML</strong> it will be escaped. 
- foo ---TEST-- -"url_encode" filter for PHP < 5.4 and HHVM ---CONDITION-- -defined('PHP_QUERY_RFC3986') ---TEMPLATE-- -{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode }} -{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode|raw }} -{{ {}|url_encode|default("default") }} -{{ 'spéßi%le%c0d@dspa ce'|url_encode }} ---DATA-- -return array() ---EXPECT-- -foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= -foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= -default -sp%C3%A9%C3%9Fi%25le%25c0d%40dspa%20ce ---TEST-- -"url_encode" filter ---CONDITION-- -defined('PHP_QUERY_RFC3986') ---TEMPLATE-- -{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode }} -{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode|raw }} -{{ {}|url_encode|default("default") }} -{{ 'spéßi%le%c0d@dspa ce'|url_encode }} ---DATA-- -return array() ---EXPECT-- -foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= -foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce= -default -sp%C3%A9%C3%9Fi%25le%25c0d%40dspa%20ce ---TEST-- -"attribute" function ---TEMPLATE-- -{{ attribute(obj, method) }} -{{ attribute(array, item) }} -{{ attribute(obj, "bar", ["a", "b"]) }} -{{ attribute(obj, "bar", arguments) }} -{{ attribute(obj, method) is defined ? 'ok' : 'ko' }} -{{ attribute(obj, nonmethod) is defined ? 'ok' : 'ko' }} ---DATA-- -return array('obj' => new TwigTestFoo(), 'method' => 'foo', 'array' => array('foo' => 'bar'), 'item' => 'foo', 'nonmethod' => 'xxx', 'arguments' => array('a', 'b')) ---EXPECT-- -foo -bar -bar_a-b -bar_a-b -ok -ko ---TEST-- -"block" function ---TEMPLATE-- -{% extends 'base.twig' %} -{% block bar %}BAR{% endblock %} ---TEMPLATE(base.twig)-- -{% block foo %}{{ block('bar') }}{% endblock %} -{% block bar %}BAR_BASE{% endblock %} ---DATA-- -return array() ---EXPECT-- -BARBAR ---TEST-- -"constant" function ---TEMPLATE-- -{{ constant('DATE_W3C') == expect ? 'true' : 'false' }} -{{ constant('ARRAY_AS_PROPS', object) }} ---DATA-- -return array('expect' => DATE_W3C, 'object' => new ArrayObject(array('hi'))); ---EXPECT-- -true -2 ---TEST-- -"cycle" function ---TEMPLATE-- -{% for i in 0..6 %} -{{ cycle(array1, i) }}-{{ cycle(array2, i) }} -{% endfor %} ---DATA-- -return array('array1' => array('odd', 'even'), 'array2' => array('apple', 'orange', 'citrus')) ---EXPECT-- -odd-apple -even-orange -odd-citrus -even-apple -odd-orange -even-citrus -odd-apple ---TEST-- -"date" function ---TEMPLATE-- -{{ date(date, "America/New_York")|date('d/m/Y H:i:s P', false) }} -{{ date(timezone="America/New_York", date=date)|date('d/m/Y H:i:s P', false) }} ---DATA-- -date_default_timezone_set('UTC'); -return array('date' => mktime(13, 45, 0, 10, 4, 2010)) ---EXPECT-- -04/10/2010 09:45:00 -04:00 -04/10/2010 09:45:00 -04:00 ---TEST-- -"date" function ---TEMPLATE-- -{{ date() == date('now') ? 'OK' : 'KO' }} -{{ date(date1) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} -{{ date(date2) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} -{{ date(date3) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} -{{ date(date4) == date('2010-10-04 13:45') ? 'OK' : 'KO' }} -{{ date(date5) == date('1964-01-02 03:04') ? 
'OK' : 'KO' }} ---DATA-- -date_default_timezone_set('UTC'); -return array( - 'date1' => mktime(13, 45, 0, 10, 4, 2010), - 'date2' => new DateTime('2010-10-04 13:45'), - 'date3' => '2010-10-04 13:45', - 'date4' => 1286199900, // DateTime::createFromFormat('Y-m-d H:i', '2010-10-04 13:45', new DateTimeZone('UTC'))->getTimestamp() -- A unixtimestamp is always GMT - 'date5' => -189291360, // DateTime::createFromFormat('Y-m-d H:i', '1964-01-02 03:04', new DateTimeZone('UTC'))->getTimestamp(), -) ---EXPECT-- -OK -OK -OK -OK -OK -OK ---TEST-- -"dump" function, xdebug is not loaded or xdebug <2.2-dev is loaded ---CONDITION-- -!extension_loaded('xdebug') || (($r = new ReflectionExtension('xdebug')) && version_compare($r->getVersion(), '2.2-dev', '<')) ---TEMPLATE-- -{{ dump() }} ---DATA-- -return array('foo' => 'foo', 'bar' => 'bar') ---CONFIG-- -return array('debug' => true, 'autoescape' => false); ---TEST-- -"dump" function ---CONDITION-- -!extension_loaded('xdebug') ---TEMPLATE-- -{{ dump('foo') }} -{{ dump('foo', 'bar') }} ---DATA-- -return array('foo' => 'foo', 'bar' => 'bar') ---CONFIG-- -return array('debug' => true, 'autoescape' => false); ---EXPECT-- -string(3) "foo" - -string(3) "foo" -string(3) "bar" ---TEST-- -dynamic function ---TEMPLATE-- -{{ foo_path('bar') }} -{{ a_foo_b_bar('bar') }} ---DATA-- -return array() ---EXPECT-- -foo/bar -a/b/bar ---TEST-- -"include" function ---TEMPLATE-- -{% set tmp = include("foo.twig") %} - -FOO{{ tmp }}BAR ---TEMPLATE(foo.twig)-- -FOOBAR ---DATA-- -return array() ---EXPECT-- -FOO -FOOBARBAR ---TEST-- -"include" function is safe for auto-escaping ---TEMPLATE-- -{{ include("foo.twig") }} ---TEMPLATE(foo.twig)-- -

    Test

    ---DATA-- -return array() ---EXPECT-- -

    Test

    ---TEST-- -"include" function ---TEMPLATE-- -FOO -{{ include("foo.twig") }} - -BAR ---TEMPLATE(foo.twig)-- -FOOBAR ---DATA-- -return array() ---EXPECT-- -FOO - -FOOBAR - -BAR ---TEST-- -"include" function allows expressions for the template to include ---TEMPLATE-- -FOO -{{ include(foo) }} - -BAR ---TEMPLATE(foo.twig)-- -FOOBAR ---DATA-- -return array('foo' => 'foo.twig') ---EXPECT-- -FOO - -FOOBAR - -BAR ---TEST-- -"include" function ---TEMPLATE-- -{{ include(["foo.twig", "bar.twig"], ignore_missing = true) }} -{{ include("foo.twig", ignore_missing = true) }} -{{ include("foo.twig", ignore_missing = true, variables = {}) }} -{{ include("foo.twig", ignore_missing = true, variables = {}, with_context = true) }} ---DATA-- -return array() ---EXPECT-- ---TEST-- -"include" function ---TEMPLATE-- -{% extends "base.twig" %} - -{% block content %} - {{ parent() }} -{% endblock %} ---TEMPLATE(base.twig)-- -{% block content %} - {{ include("foo.twig") }} -{% endblock %} ---DATA-- -return array(); ---EXCEPTION-- -Twig_Error_Loader: Template "foo.twig" is not defined in "base.twig" at line 3. ---TEST-- -"include" function ---TEMPLATE-- -{{ include("foo.twig") }} ---DATA-- -return array(); ---EXCEPTION-- -Twig_Error_Loader: Template "foo.twig" is not defined in "index.twig" at line 2. ---TEST-- -"include" tag sandboxed ---TEMPLATE-- -{{ include("foo.twig", sandboxed = true) }} ---TEMPLATE(foo.twig)-- -{{ foo|e }} ---DATA-- -return array() ---EXCEPTION-- -Twig_Sandbox_SecurityError: Filter "e" is not allowed in "index.twig" at line 2. ---TEST-- -"include" function accepts Twig_Template instance ---TEMPLATE-- -{{ include(foo) }} FOO ---TEMPLATE(foo.twig)-- -BAR ---DATA-- -return array('foo' => $twig->loadTemplate('foo.twig')) ---EXPECT-- -BAR FOO ---TEST-- -"include" function ---TEMPLATE-- -{{ include(["foo.twig", "bar.twig"]) }} -{{- include(["bar.twig", "foo.twig"]) }} ---TEMPLATE(foo.twig)-- -foo ---DATA-- -return array() ---EXPECT-- -foo -foo ---TEST-- -"include" function accept variables and with_context ---TEMPLATE-- -{{ include("foo.twig") }} -{{- include("foo.twig", with_context = false) }} -{{- include("foo.twig", {'foo1': 'bar'}) }} -{{- include("foo.twig", {'foo1': 'bar'}, with_context = false) }} ---TEMPLATE(foo.twig)-- -{% for k, v in _context %}{{ k }},{% endfor %} ---DATA-- -return array('foo' => 'bar') ---EXPECT-- -foo,global,_parent, -global,_parent, -foo,global,foo1,_parent, -foo1,global,_parent, ---TEST-- -"include" function accept variables ---TEMPLATE-- -{{ include("foo.twig", {'foo': 'bar'}) }} -{{- include("foo.twig", vars) }} ---TEMPLATE(foo.twig)-- -{{ foo }} ---DATA-- -return array('vars' => array('foo' => 'bar')) ---EXPECT-- -bar -bar ---TEST-- -"max" function ---TEMPLATE-- -{{ max([2, 1, 3, 5, 4]) }} -{{ max(2, 1, 3, 5, 4) }} -{{ max({2:"two", 1:"one", 3:"three", 5:"five", 4:"for"}) }} ---DATA-- -return array() ---EXPECT-- -5 -5 -two ---TEST-- -"min" function ---TEMPLATE-- -{{ min(2, 1, 3, 5, 4) }} -{{ min([2, 1, 3, 5, 4]) }} -{{ min({2:"two", 1:"one", 3:"three", 5:"five", 4:"for"}) }} ---DATA-- -return array() ---EXPECT-- -1 -1 -five ---TEST-- -"range" function ---TEMPLATE-- -{{ range(low=0+1, high=10+0, step=2)|join(',') }} ---DATA-- -return array() ---EXPECT-- -1,3,5,7,9 ---TEST-- -"block" function recursively called in a parent template ---TEMPLATE-- -{% extends "ordered_menu.twig" %} -{% block label %}"{{ parent() }}"{% endblock %} -{% block list %}{% set class = 'b' %}{{ parent() }}{% endblock %} ---TEMPLATE(ordered_menu.twig)-- -{% extends "menu.twig" %} -{% block 
list %}{% set class = class|default('a') %}
      {{ block('children') }}
    {% endblock %} ---TEMPLATE(menu.twig)-- -{% extends "base.twig" %} -{% block list %}
      {{ block('children') }}
    {% endblock %} -{% block children %}{% set currentItem = item %}{% for item in currentItem %}{{ block('item') }}{% endfor %}{% set item = currentItem %}{% endblock %} -{% block item %}
  • {% if item is not iterable %}{{ block('label') }}{% else %}{{ block('list') }}{% endif %}
  • {% endblock %} -{% block label %}{{ item }}{{ block('unknown') }}{% endblock %} ---TEMPLATE(base.twig)-- -{{ block('list') }} ---DATA-- -return array('item' => array('1', '2', array('3.1', array('3.2.1', '3.2.2'), '3.4'))) ---EXPECT-- -
    1. "1"
    2. "2"
      1. "3.1"
        1. "3.2.1"
        2. "3.2.2"
      2. "3.4"
    ---TEST-- -"source" function ---TEMPLATE-- -FOO -{{ source("foo.twig") }} - -BAR ---TEMPLATE(foo.twig)-- -{{ foo }}
    ---DATA-- -return array() ---EXPECT-- -FOO - -{{ foo }}
    - -BAR ---TEST-- -"template_from_string" function ---TEMPLATE-- -{% include template_from_string(template) %} - -{% include template_from_string("Hello {{ name }}") %} -{% include template_from_string('{% extends "parent.twig" %}{% block content %}Hello {{ name }}{% endblock %}') %} ---TEMPLATE(parent.twig)-- -{% block content %}{% endblock %} ---DATA-- -return array('name' => 'Fabien', 'template' => "Hello {{ name }}") ---EXPECT-- -Hello Fabien -Hello Fabien -Hello Fabien ---TEST-- -macro ---TEMPLATE-- -{% from _self import test %} - -{% macro test(a, b = 'bar') -%} -{{ a }}{{ b }} -{%- endmacro %} - -{{ test('foo') }} -{{ test('bar', 'foo') }} ---DATA-- -return array(); ---EXPECT-- -foobar -barfoo ---TEST-- -macro ---TEMPLATE-- -{% import _self as macros %} - -{% macro foo(data) %} - {{ data }} -{% endmacro %} - -{% macro bar() %} -
    -{% endmacro %} - -{{ macros.foo(macros.bar()) }} ---DATA-- -return array(); ---EXPECT-- -
    ---TEST-- -macro ---TEMPLATE-- -{% from _self import test %} - -{% macro test(this) -%} - {{ this }} -{%- endmacro %} - -{{ test(this) }} ---DATA-- -return array('this' => 'foo'); ---EXPECT-- -foo ---TEST-- -macro ---TEMPLATE-- -{% import _self as test %} -{% from _self import test %} - -{% macro test(a, b) -%} - {{ a|default('a') }}
    - {{- b|default('b') }}
    -{%- endmacro %} - -{{ test.test() }} -{{ test() }} -{{ test.test(1, "c") }} -{{ test(1, "c") }} ---DATA-- -return array(); ---EXPECT-- -a
    b
    -a
    b
    -1
    c
    -1
    c
    ---TEST-- -macro with a filter ---TEMPLATE-- -{% import _self as test %} - -{% macro test() %} - {% filter escape %}foo
    {% endfilter %} -{% endmacro %} - -{{ test.test() }} ---DATA-- -return array(); ---EXPECT-- -foo<br /> ---TEST-- -Twig outputs 0 nodes correctly ---TEMPLATE-- -{{ foo }}0{{ foo }} ---DATA-- -return array('foo' => 'foo') ---EXPECT-- -foo0foo ---TEST-- -error in twig extension ---TEMPLATE-- -{{ object.region is not null ? object.regionChoices[object.region] }} ---EXPECT-- -house.region.s ---TEST-- -Twig is able to deal with SimpleXMLElement instances as variables ---CONDITION-- -version_compare(phpversion(), '5.3.0', '>=') ---TEMPLATE-- -Hello '{{ images.image.0.group }}'! -{{ images.image.0.group.attributes.myattr }} -{{ images.children().image.count() }} -{% for image in images %} - - {{ image.group }} -{% endfor %} ---DATA-- -return array('images' => new SimpleXMLElement('foobar')) ---EXPECT-- -Hello 'foo'! -example -2 - - foo - - bar ---TEST-- -Twig does not confuse strings with integers in getAttribute() ---TEMPLATE-- -{{ hash['2e2'] }} ---DATA-- -return array('hash' => array('2e2' => 'works')) ---EXPECT-- -works ---TEST-- -"autoescape" tag applies escaping on its children ---TEMPLATE-- -{% autoescape %} -{{ var }}
    -{% endautoescape %} -{% autoescape 'html' %} -{{ var }}
    -{% endautoescape %} -{% autoescape false %} -{{ var }}
    -{% endautoescape %} -{% autoescape true %} -{{ var }}
    -{% endautoescape %} -{% autoescape false %} -{{ var }}
    -{% endautoescape %} ---DATA-- -return array('var' => '
    ') ---EXPECT-- -<br />
    -<br />
    -

    -<br />
    -

    ---TEST-- -"autoescape" tag applies escaping on embedded blocks ---TEMPLATE-- -{% autoescape 'html' %} - {% block foo %} - {{ var }} - {% endblock %} -{% endautoescape %} ---DATA-- -return array('var' => '
    ') ---EXPECT-- -<br /> ---TEST-- -"autoescape" tag does not double-escape ---TEMPLATE-- -{% autoescape 'html' %} -{{ var|escape }} -{% endautoescape %} ---DATA-- -return array('var' => '
    ') ---EXPECT-- -<br /> ---TEST-- -"autoescape" tag applies escaping after calling functions ---TEMPLATE-- - -autoescape false -{% autoescape false %} - -safe_br -{{ safe_br() }} - -unsafe_br -{{ unsafe_br() }} - -{% endautoescape %} - -autoescape 'html' -{% autoescape 'html' %} - -safe_br -{{ safe_br() }} - -unsafe_br -{{ unsafe_br() }} - -unsafe_br()|raw -{{ (unsafe_br())|raw }} - -safe_br()|escape -{{ (safe_br())|escape }} - -safe_br()|raw -{{ (safe_br())|raw }} - -unsafe_br()|escape -{{ (unsafe_br())|escape }} - -{% endautoescape %} - -autoescape js -{% autoescape 'js' %} - -safe_br -{{ safe_br() }} - -{% endautoescape %} ---DATA-- -return array() ---EXPECT-- - -autoescape false - -safe_br -
    - -unsafe_br -
    - - -autoescape 'html' - -safe_br -
    - -unsafe_br -<br /> - -unsafe_br()|raw -
    - -safe_br()|escape -<br /> - -safe_br()|raw -
    - -unsafe_br()|escape -<br /> - - -autoescape js - -safe_br -\x3Cbr\x20\x2F\x3E ---TEST-- -"autoescape" tag does not apply escaping on literals ---TEMPLATE-- -{% autoescape 'html' %} - -1. Simple literal -{{ "
    " }} - -2. Conditional expression with only literals -{{ true ? "
    " : "
    " }} - -3. Conditional expression with a variable -{{ true ? "
    " : someVar }} - -4. Nested conditionals with only literals -{{ true ? (true ? "
    " : "
    ") : "\n" }} - -5. Nested conditionals with a variable -{{ true ? (true ? "
    " : someVar) : "\n" }} - -6. Nested conditionals with a variable marked safe -{{ true ? (true ? "
    " : someVar|raw) : "\n" }} - -{% endautoescape %} ---DATA-- -return array() ---EXPECT-- - -1. Simple literal -
    - -2. Conditional expression with only literals -
    - -3. Conditional expression with a variable -<br /> - -4. Nested conditionals with only literals -
    - -5. Nested conditionals with a variable -<br /> - -6. Nested conditionals with a variable marked safe -
    ---TEST-- -"autoescape" tags can be nested at will ---TEMPLATE-- -{{ var }} -{% autoescape 'html' %} - {{ var }} - {% autoescape false %} - {{ var }} - {% autoescape 'html' %} - {{ var }} - {% endautoescape %} - {{ var }} - {% endautoescape %} - {{ var }} -{% endautoescape %} -{{ var }} ---DATA-- -return array('var' => '
    ') ---EXPECT-- -<br /> - <br /> -
    - <br /> -
    - <br /> -<br /> ---TEST-- -"autoescape" tag applies escaping to object method calls ---TEMPLATE-- -{% autoescape 'html' %} -{{ user.name }} -{{ user.name|lower }} -{{ user }} -{% endautoescape %} ---EXPECT-- -Fabien<br /> -fabien<br /> -Fabien<br /> ---TEST-- -"autoescape" tag does not escape when raw is used as a filter ---TEMPLATE-- -{% autoescape 'html' %} -{{ var|raw }} -{% endautoescape %} ---DATA-- -return array('var' => '
    ') ---EXPECT-- -
    ---TEST-- -"autoescape" tag accepts an escaping strategy ---TEMPLATE-- -{% autoescape true js %}{{ var }}{% endautoescape %} - -{% autoescape true html %}{{ var }}{% endautoescape %} - -{% autoescape 'js' %}{{ var }}{% endautoescape %} - -{% autoescape 'html' %}{{ var }}{% endautoescape %} ---DATA-- -return array('var' => '
    "') ---EXPECT-- -\x3Cbr\x20\x2F\x3E\x22 -<br />" -\x3Cbr\x20\x2F\x3E\x22 -<br />" ---TEST-- -escape types ---TEMPLATE-- - -1. autoescape 'html' |escape('js') - -{% autoescape 'html' %} - -{% endautoescape %} - -2. autoescape 'html' |escape('js') - -{% autoescape 'html' %} - -{% endautoescape %} - -3. autoescape 'js' |escape('js') - -{% autoescape 'js' %} - -{% endautoescape %} - -4. no escape - -{% autoescape false %} - -{% endautoescape %} - -5. |escape('js')|escape('html') - -{% autoescape false %} - -{% endautoescape %} - -6. autoescape 'html' |escape('js')|escape('html') - -{% autoescape 'html' %} - -{% endautoescape %} - ---DATA-- -return array('msg' => "<>\n'\"") ---EXPECT-- - -1. autoescape 'html' |escape('js') - - - -2. autoescape 'html' |escape('js') - - - -3. autoescape 'js' |escape('js') - - - -4. no escape - - - -5. |escape('js')|escape('html') - - - -6. autoescape 'html' |escape('js')|escape('html') - - - ---TEST-- -"autoescape" tag do not applies escaping on filter arguments ---TEMPLATE-- -{% autoescape 'html' %} -{{ var|nl2br("
    ") }} -{{ var|nl2br("
    "|escape) }} -{{ var|nl2br(sep) }} -{{ var|nl2br(sep|raw) }} -{{ var|nl2br(sep|escape) }} -{% endautoescape %} ---DATA-- -return array('var' => "\nTwig", 'sep' => '
    ') ---EXPECT-- -<Fabien>
    -Twig -<Fabien><br /> -Twig -<Fabien>
    -Twig -<Fabien>
    -Twig -<Fabien><br /> -Twig ---TEST-- -"autoescape" tag applies escaping after calling filters ---TEMPLATE-- -{% autoescape 'html' %} - -(escape_and_nl2br is an escaper filter) - -1. Don't escape escaper filter output -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is not escaped ) -{{ var|escape_and_nl2br }} - -2. Don't escape escaper filter output -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is not escaped, |raw is redundant ) -{{ var|escape_and_nl2br|raw }} - -3. Explicit escape -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is explicitly escaped by |escape ) -{{ var|escape_and_nl2br|escape }} - -4. Escape non-escaper filter output -( var is upper-cased by |upper, - the output is auto-escaped ) -{{ var|upper }} - -5. Escape if last filter is not an escaper -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is upper-cased by |upper, - the output is auto-escaped as |upper is not an escaper ) -{{ var|escape_and_nl2br|upper }} - -6. Don't escape escaper filter output -( var is upper cased by upper, - the output is escaped by |escape_and_nl2br, line-breaks are added, - the output is not escaped as |escape_and_nl2br is an escaper ) -{{ var|upper|escape_and_nl2br }} - -7. Escape if last filter is not an escaper -( the output of |format is "" ~ var ~ "", - the output is auto-escaped ) -{{ "%s"|format(var) }} - -8. Escape if last filter is not an escaper -( the output of |format is "" ~ var ~ "", - |raw is redundant, - the output is auto-escaped ) -{{ "%s"|raw|format(var) }} - -9. Don't escape escaper filter output -( the output of |format is "" ~ var ~ "", - the output is not escaped due to |raw filter at the end ) -{{ "%s"|format(var)|raw }} - -10. Don't escape escaper filter output -( the output of |format is "" ~ var ~ "", - the output is not escaped due to |raw filter at the end, - the |raw filter on var is redundant ) -{{ "%s"|format(var|raw)|raw }} - -{% endautoescape %} ---DATA-- -return array('var' => "\nTwig") ---EXPECT-- - -(escape_and_nl2br is an escaper filter) - -1. Don't escape escaper filter output -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is not escaped ) -<Fabien>
    -Twig - -2. Don't escape escaper filter output -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is not escaped, |raw is redundant ) -<Fabien>
    -Twig - -3. Explicit escape -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is explicitly escaped by |escape ) -&lt;Fabien&gt;<br /> -Twig - -4. Escape non-escaper filter output -( var is upper-cased by |upper, - the output is auto-escaped ) -<FABIEN> -TWIG - -5. Escape if last filter is not an escaper -( var is escaped by |escape_and_nl2br, line-breaks are added, - the output is upper-cased by |upper, - the output is auto-escaped as |upper is not an escaper ) -&LT;FABIEN&GT;<BR /> -TWIG - -6. Don't escape escaper filter output -( var is upper cased by upper, - the output is escaped by |escape_and_nl2br, line-breaks are added, - the output is not escaped as |escape_and_nl2br is an escaper ) -<FABIEN>
    -TWIG - -7. Escape if last filter is not an escaper -( the output of |format is "" ~ var ~ "", - the output is auto-escaped ) -<b><Fabien> -Twig</b> - -8. Escape if last filter is not an escaper -( the output of |format is "" ~ var ~ "", - |raw is redundant, - the output is auto-escaped ) -<b><Fabien> -Twig</b> - -9. Don't escape escaper filter output -( the output of |format is "" ~ var ~ "", - the output is not escaped due to |raw filter at the end ) - -Twig - -10. Don't escape escaper filter output -( the output of |format is "" ~ var ~ "", - the output is not escaped due to |raw filter at the end, - the |raw filter on var is redundant ) - -Twig ---TEST-- -"autoescape" tag applies escaping after calling filters, and before calling pre_escape filters ---TEMPLATE-- -{% autoescape 'html' %} - -(nl2br is pre_escaped for "html" and declared safe for "html") - -1. Pre-escape and don't post-escape -( var|escape|nl2br ) -{{ var|nl2br }} - -2. Don't double-pre-escape -( var|escape|nl2br ) -{{ var|escape|nl2br }} - -3. Don't escape safe values -( var|raw|nl2br ) -{{ var|raw|nl2br }} - -4. Don't escape safe values -( var|escape|nl2br|nl2br ) -{{ var|nl2br|nl2br }} - -5. Re-escape values that are escaped for an other contexts -( var|escape_something|escape|nl2br ) -{{ var|escape_something|nl2br }} - -6. Still escape when using filters not declared safe -( var|escape|nl2br|upper|escape ) -{{ var|nl2br|upper }} - -{% endautoescape %} ---DATA-- -return array('var' => "\nTwig") ---EXPECT-- - -(nl2br is pre_escaped for "html" and declared safe for "html") - -1. Pre-escape and don't post-escape -( var|escape|nl2br ) -<Fabien>
    -Twig - -2. Don't double-pre-escape -( var|escape|nl2br ) -<Fabien>
    -Twig - -3. Don't escape safe values -( var|raw|nl2br ) -
    -Twig - -4. Don't escape safe values -( var|escape|nl2br|nl2br ) -<Fabien>

    -Twig - -5. Re-escape values that are escaped for an other contexts -( var|escape_something|escape|nl2br ) -<FABIEN>
    -TWIG - -6. Still escape when using filters not declared safe -( var|escape|nl2br|upper|escape ) -&LT;FABIEN&GT;<BR /> -TWIG - ---TEST-- -"autoescape" tag handles filters preserving the safety ---TEMPLATE-- -{% autoescape 'html' %} - -(preserves_safety is preserving safety for "html") - -1. Unsafe values are still unsafe -( var|preserves_safety|escape ) -{{ var|preserves_safety }} - -2. Safe values are still safe -( var|escape|preserves_safety ) -{{ var|escape|preserves_safety }} - -3. Re-escape values that are escaped for an other contexts -( var|escape_something|preserves_safety|escape ) -{{ var|escape_something|preserves_safety }} - -4. Still escape when using filters not declared safe -( var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'})|escape ) -{{ var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'}) }} - -{% endautoescape %} ---DATA-- -return array('var' => "\nTwig") ---EXPECT-- - -(preserves_safety is preserving safety for "html") - -1. Unsafe values are still unsafe -( var|preserves_safety|escape ) -<FABIEN> -TWIG - -2. Safe values are still safe -( var|escape|preserves_safety ) -<FABIEN> -TWIG - -3. Re-escape values that are escaped for an other contexts -( var|escape_something|preserves_safety|escape ) -<FABIEN> -TWIG - -4. Still escape when using filters not declared safe -( var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'})|escape ) -&LT;FABPOT&GT; -TWIG - ---TEST-- -"block" tag ---TEMPLATE-- -{% block title1 %}FOO{% endblock %} -{% block title2 foo|lower %} ---TEMPLATE(foo.twig)-- -{% block content %}{% endblock %} ---DATA-- -return array('foo' => 'bar') ---EXPECT-- -FOObar ---TEST-- -"block" tag ---TEMPLATE-- -{% block content %} - {% block content %} - {% endblock %} -{% endblock %} ---DATA-- -return array() ---EXCEPTION-- -Twig_Error_Syntax: The block 'content' has already been defined line 2 in "index.twig" at line 3 ---TEST-- -"§" special chars in a block name ---TEMPLATE-- -{% block § %} -§ -{% endblock § %} ---DATA-- -return array() ---EXPECT-- -§ ---TEST-- -"embed" tag ---TEMPLATE-- -FOO -{% embed "foo.twig" %} - {% block c1 %} - {{ parent() }} - block1extended - {% endblock %} -{% endembed %} - -BAR ---TEMPLATE(foo.twig)-- -A -{% block c1 %} - block1 -{% endblock %} -B -{% block c2 %} - block2 -{% endblock %} -C ---DATA-- -return array() ---EXPECT-- -FOO - -A - block1 - - block1extended - B - block2 -C -BAR ---TEST-- -"embed" tag ---TEMPLATE(index.twig)-- -FOO -{% embed "foo.twig" %} - {% block c1 %} - {{ nothing }} - {% endblock %} -{% endembed %} -BAR ---TEMPLATE(foo.twig)-- -{% block c1 %}{% endblock %} ---DATA-- -return array() ---EXCEPTION-- -Twig_Error_Runtime: Variable "nothing" does not exist in "index.twig" at line 5 ---TEST-- -"embed" tag ---TEMPLATE-- -FOO -{% embed "foo.twig" %} - {% block c1 %} - {{ parent() }} - block1extended - {% endblock %} -{% endembed %} - -{% embed "foo.twig" %} - {% block c1 %} - {{ parent() }} - block1extended - {% endblock %} -{% endembed %} - -BAR ---TEMPLATE(foo.twig)-- -A -{% block c1 %} - block1 -{% endblock %} -B -{% block c2 %} - block2 -{% endblock %} -C ---DATA-- -return array() ---EXPECT-- -FOO - -A - block1 - - block1extended - B - block2 -C - -A - block1 - - block1extended - B - block2 -C -BAR ---TEST-- -"embed" tag ---TEMPLATE-- -{% embed "foo.twig" %} - {% block c1 %} - {{ parent() }} - {% embed "foo.twig" %} - {% block c1 %} - {{ parent() }} - block1extended - {% endblock %} - {% endembed %} - - {% endblock %} -{% endembed %} ---TEMPLATE(foo.twig)-- -A -{% block c1 %} - block1 -{% endblock 
%} -B -{% block c2 %} - block2 -{% endblock %} -C ---DATA-- -return array() ---EXPECT-- -A - block1 - - -A - block1 - - block1extended - B - block2 -C - B - block2 -C ---TEST-- -"embed" tag ---TEMPLATE-- -{% extends "base.twig" %} - -{% block c1 %} - {{ parent() }} - blockc1baseextended -{% endblock %} - -{% block c2 %} - {{ parent() }} - - {% embed "foo.twig" %} - {% block c1 %} - {{ parent() }} - block1extended - {% endblock %} - {% endembed %} -{% endblock %} ---TEMPLATE(base.twig)-- -A -{% block c1 %} - blockc1base -{% endblock %} -{% block c2 %} - blockc2base -{% endblock %} -B ---TEMPLATE(foo.twig)-- -A -{% block c1 %} - block1 -{% endblock %} -B -{% block c2 %} - block2 -{% endblock %} -C ---DATA-- -return array() ---EXPECT-- -A - blockc1base - - blockc1baseextended - blockc2base - - - -A - block1 - - block1extended - B - block2 -CB--TEST-- -"filter" tag applies a filter on its children ---TEMPLATE-- -{% filter upper %} -Some text with a {{ var }} -{% endfilter %} ---DATA-- -return array('var' => 'var') ---EXPECT-- -SOME TEXT WITH A VAR ---TEST-- -"filter" tag applies a filter on its children ---TEMPLATE-- -{% filter json_encode|raw %}test{% endfilter %} ---DATA-- -return array() ---EXPECT-- -"test" ---TEST-- -"filter" tags accept multiple chained filters ---TEMPLATE-- -{% filter lower|title %} - {{ var }} -{% endfilter %} ---DATA-- -return array('var' => 'VAR') ---EXPECT-- - Var ---TEST-- -"filter" tags can be nested at will ---TEMPLATE-- -{% filter lower|title %} - {{ var }} - {% filter upper %} - {{ var }} - {% endfilter %} - {{ var }} -{% endfilter %} ---DATA-- -return array('var' => 'var') ---EXPECT-- - Var - Var - Var ---TEST-- -"filter" tag applies the filter on "for" tags ---TEMPLATE-- -{% filter upper %} -{% for item in items %} -{{ item }} -{% endfor %} -{% endfilter %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- -A -B ---TEST-- -"filter" tag applies the filter on "if" tags ---TEMPLATE-- -{% filter upper %} -{% if items %} -{{ items|join(', ') }} -{% endif %} - -{% if items.3 is defined %} -FOO -{% else %} -{{ items.1 }} -{% endif %} - -{% if items.3 is defined %} -FOO -{% elseif items.1 %} -{{ items.0 }} -{% endif %} - -{% endfilter %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- -A, B - -B - -A ---TEST-- -"for" tag takes a condition ---TEMPLATE-- -{% for i in 1..5 if i is odd -%} - {{ loop.index }}.{{ i }}{{ foo.bar }} -{% endfor %} ---DATA-- -return array('foo' => array('bar' => 'X')) ---CONFIG-- -return array('strict_variables' => false) ---EXPECT-- -1.1X -2.3X -3.5X ---TEST-- -"for" tag keeps the context safe ---TEMPLATE-- -{% for item in items %} - {% for item in items %} - * {{ item }} - {% endfor %} - * {{ item }} -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- - * a - * b - * a - * a - * b - * b ---TEST-- -"for" tag can use an "else" clause ---TEMPLATE-- -{% for item in items %} - * {{ item }} -{% else %} - no item -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- - * a - * b ---DATA-- -return array('items' => array()) ---EXPECT-- - no item ---DATA-- -return array() ---CONFIG-- -return array('strict_variables' => false) ---EXPECT-- - no item ---TEST-- -"for" tag does not reset inner variables ---TEMPLATE-- -{% for i in 1..2 %} - {% for j in 0..2 %} - {{k}}{% set k = k+1 %} {{ loop.parent.loop.index }} - {% endfor %} -{% endfor %} ---DATA-- -return array('k' => 0) ---EXPECT-- - 0 1 - 1 1 - 2 1 - 3 2 - 4 2 - 5 2 ---TEST-- -"for" tag can iterate over keys and values 
---TEMPLATE-- -{% for key, item in items %} - * {{ key }}/{{ item }} -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- - * 0/a - * 1/b ---TEST-- -"for" tag can iterate over keys ---TEMPLATE-- -{% for key in items|keys %} - * {{ key }} -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- - * 0 - * 1 ---TEST-- -"for" tag adds a loop variable to the context locally ---TEMPLATE-- -{% for item in items %} -{% endfor %} -{% if loop is not defined %}WORKS{% endif %} ---DATA-- -return array('items' => array()) ---EXPECT-- -WORKS ---TEST-- -"for" tag adds a loop variable to the context ---TEMPLATE-- -{% for item in items %} - * {{ loop.index }}/{{ loop.index0 }} - * {{ loop.revindex }}/{{ loop.revindex0 }} - * {{ loop.first }}/{{ loop.last }}/{{ loop.length }} - -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- - * 1/0 - * 2/1 - * 1//2 - - * 2/1 - * 1/0 - * /1/2 ---TEST-- -"for" tag ---TEMPLATE-- -{% for i, item in items if loop.last > 0 %} -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXCEPTION-- -Twig_Error_Syntax: The "loop" variable cannot be used in a looping condition in "index.twig" at line 2 ---TEST-- -"for" tag ---TEMPLATE-- -{% for i, item in items if i > 0 %} - {{ loop.last }} -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXCEPTION-- -Twig_Error_Syntax: The "loop.last" variable is not defined when looping with a condition in "index.twig" at line 3 ---TEST-- -"for" tag can use an "else" clause ---TEMPLATE-- -{% for item in items %} - {% for item in items1 %} - * {{ item }} - {% else %} - no {{ item }} - {% endfor %} -{% else %} - no item1 -{% endfor %} ---DATA-- -return array('items' => array('a', 'b'), 'items1' => array()) ---EXPECT-- -no a - no b ---TEST-- -"for" tag iterates over iterable and countable objects ---TEMPLATE-- -{% for item in items %} - * {{ item }} - * {{ loop.index }}/{{ loop.index0 }} - * {{ loop.revindex }}/{{ loop.revindex0 }} - * {{ loop.first }}/{{ loop.last }}/{{ loop.length }} - -{% endfor %} - -{% for key, value in items %} - * {{ key }}/{{ value }} -{% endfor %} - -{% for key in items|keys %} - * {{ key }} -{% endfor %} ---DATA-- -class ItemsIteratorCountable implements Iterator, Countable -{ - protected $values = array('foo' => 'bar', 'bar' => 'foo'); - public function current() { return current($this->values); } - public function key() { return key($this->values); } - public function next() { return next($this->values); } - public function rewind() { return reset($this->values); } - public function valid() { return false !== current($this->values); } - public function count() { return count($this->values); } -} -return array('items' => new ItemsIteratorCountable()) ---EXPECT-- - * bar - * 1/0 - * 2/1 - * 1//2 - - * foo - * 2/1 - * 1/0 - * /1/2 - - - * foo/bar - * bar/foo - - * foo - * bar ---TEST-- -"for" tag iterates over iterable objects ---TEMPLATE-- -{% for item in items %} - * {{ item }} - * {{ loop.index }}/{{ loop.index0 }} - * {{ loop.first }} - -{% endfor %} - -{% for key, value in items %} - * {{ key }}/{{ value }} -{% endfor %} - -{% for key in items|keys %} - * {{ key }} -{% endfor %} ---DATA-- -class ItemsIterator implements Iterator -{ - protected $values = array('foo' => 'bar', 'bar' => 'foo'); - public function current() { return current($this->values); } - public function key() { return key($this->values); } - public function next() { return next($this->values); } - public function rewind() { return 
reset($this->values); } - public function valid() { return false !== current($this->values); } -} -return array('items' => new ItemsIterator()) ---EXPECT-- - * bar - * 1/0 - * 1 - - * foo - * 2/1 - * - - - * foo/bar - * bar/foo - - * foo - * bar ---TEST-- -"for" tags can be nested ---TEMPLATE-- -{% for key, item in items %} -* {{ key }} ({{ loop.length }}): -{% for value in item %} - * {{ value }} ({{ loop.length }}) -{% endfor %} -{% endfor %} ---DATA-- -return array('items' => array('a' => array('a1', 'a2', 'a3'), 'b' => array('b1'))) ---EXPECT-- -* a (2): - * a1 (3) - * a2 (3) - * a3 (3) -* b (2): - * b1 (1) ---TEST-- -"for" tag iterates over item values ---TEMPLATE-- -{% for item in items %} - * {{ item }} -{% endfor %} ---DATA-- -return array('items' => array('a', 'b')) ---EXPECT-- - * a - * b ---TEST-- -global variables ---TEMPLATE-- -{% include "included.twig" %} -{% from "included.twig" import foobar %} -{{ foobar() }} ---TEMPLATE(included.twig)-- -{% macro foobar() %} -called foobar -{% endmacro %} ---DATA-- -return array(); ---EXPECT-- -called foobar ---TEST-- -"if" creates a condition ---TEMPLATE-- -{% if a is defined %} - {{ a }} -{% elseif b is defined %} - {{ b }} -{% else %} - NOTHING -{% endif %} ---DATA-- -return array('a' => 'a') ---EXPECT-- - a ---DATA-- -return array('b' => 'b') ---EXPECT-- - b ---DATA-- -return array() ---EXPECT-- - NOTHING ---TEST-- -"if" takes an expression as a test ---TEMPLATE-- -{% if a < 2 %} - A1 -{% elseif a > 10 %} - A2 -{% else %} - A3 -{% endif %} ---DATA-- -return array('a' => 1) ---EXPECT-- - A1 ---DATA-- -return array('a' => 12) ---EXPECT-- - A2 ---DATA-- -return array('a' => 7) ---EXPECT-- - A3 ---TEST-- -"include" tag ---TEMPLATE-- -FOO -{% include "foo.twig" %} - -BAR ---TEMPLATE(foo.twig)-- -FOOBAR ---DATA-- -return array() ---EXPECT-- -FOO - -FOOBAR -BAR ---TEST-- -"include" tag allows expressions for the template to include ---TEMPLATE-- -FOO -{% include foo %} - -BAR ---TEMPLATE(foo.twig)-- -FOOBAR ---DATA-- -return array('foo' => 'foo.twig') ---EXPECT-- -FOO - -FOOBAR -BAR ---TEST-- -"include" tag ---TEMPLATE-- -{% include ["foo.twig", "bar.twig"] ignore missing %} -{% include "foo.twig" ignore missing %} -{% include "foo.twig" ignore missing with {} %} -{% include "foo.twig" ignore missing with {} only %} ---DATA-- -return array() ---EXPECT-- ---TEST-- -"include" tag ---TEMPLATE-- -{% extends "base.twig" %} - -{% block content %} - {{ parent() }} -{% endblock %} ---TEMPLATE(base.twig)-- -{% block content %} - {% include "foo.twig" %} -{% endblock %} ---DATA-- -return array(); ---EXCEPTION-- -Twig_Error_Loader: Template "foo.twig" is not defined in "base.twig" at line 3. ---TEST-- -"include" tag ---TEMPLATE-- -{% include "foo.twig" %} ---DATA-- -return array(); ---EXCEPTION-- -Twig_Error_Loader: Template "foo.twig" is not defined in "index.twig" at line 2. 
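[Editor's note] The "include" fixtures just above exercise Twig_Error_Loader for a missing template and the `ignore missing` modifier that suppresses it. Each fixture in this example file follows the same --TEST-- / --TEMPLATE-- / --DATA-- / --EXPECT-- (or --EXCEPTION--) layout: every --TEMPLATE(name)-- section becomes one entry of an array loader (the unnamed section acts as "index.twig"), --DATA-- is a PHP expression producing the render context, and rendering "index.twig" must yield the --EXPECT-- text. A minimal sketch of that mapping, assuming Twig 1.x installed via Composer; the template, data and comparison below are illustrative, not the real integration-test runner:

<?php
require 'vendor/autoload.php'; // assumption: Twig 1.x autoloaded via Composer

// --TEMPLATE-- sections become named templates in an array loader.
$loader = new Twig_Loader_Array(array(
    'index.twig' => 'FOO {% include "foo.twig" ignore missing %} BAR',
    // "foo.twig" is deliberately left out: "ignore missing" keeps this from
    // raising a Twig_Error_Loader at render time.
));
$twig = new Twig_Environment($loader, array('cache' => false));

// --DATA-- evaluates to the context array passed to render().
$context = array();

// The rendered output of index.twig is what gets compared against --EXPECT--.
echo $twig->render('index.twig', $context), "\n"; // prints "FOO  BAR"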
---TEST-- -"include" tag accept variables and only ---TEMPLATE-- -{% include "foo.twig" %} -{% include "foo.twig" only %} -{% include "foo.twig" with {'foo1': 'bar'} %} -{% include "foo.twig" with {'foo1': 'bar'} only %} ---TEMPLATE(foo.twig)-- -{% for k, v in _context %}{{ k }},{% endfor %} ---DATA-- -return array('foo' => 'bar') ---EXPECT-- -foo,global,_parent, -global,_parent, -foo,global,foo1,_parent, -foo1,global,_parent, ---TEST-- -"include" tag accepts Twig_Template instance ---TEMPLATE-- -{% include foo %} FOO ---TEMPLATE(foo.twig)-- -BAR ---DATA-- -return array('foo' => $twig->loadTemplate('foo.twig')) ---EXPECT-- -BAR FOO ---TEST-- -"include" tag ---TEMPLATE-- -{% include ["foo.twig", "bar.twig"] %} -{% include ["bar.twig", "foo.twig"] %} ---TEMPLATE(foo.twig)-- -foo ---DATA-- -return array() ---EXPECT-- -foo -foo ---TEST-- -"include" tag accept variables ---TEMPLATE-- -{% include "foo.twig" with {'foo': 'bar'} %} -{% include "foo.twig" with vars %} ---TEMPLATE(foo.twig)-- -{{ foo }} ---DATA-- -return array('vars' => array('foo' => 'bar')) ---EXPECT-- -bar -bar ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "foo.twig" %} - -{% block content %} -FOO -{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %}{% endblock %} ---DATA-- -return array() ---EXPECT-- -FOO ---TEST-- -block_expr2 ---TEMPLATE-- -{% extends "base2.twig" %} - -{% block element -%} - Element: - {{- parent() -}} -{% endblock %} ---TEMPLATE(base2.twig)-- -{% extends "base.twig" %} ---TEMPLATE(base.twig)-- -{% spaceless %} -{% block element -%} -
    - {%- if item.children is defined %} - {%- for item in item.children %} - {{- block('element') -}} - {% endfor %} - {%- endif -%} -
    -{%- endblock %} -{% endspaceless %} ---DATA-- -return array( - 'item' => array( - 'children' => array( - null, - null, - ) - ) -) ---EXPECT-- -Element:
    Element:
    Element:
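[Editor's note] The autoescape fixtures near the top of this file rely on custom filters (escape_and_nl2br, nl2br, escape_something, preserves_safety) that the test harness registers before compiling the templates. A hedged sketch of how such filters could be declared in Twig 1.x; the callback bodies are illustrative stand-ins, not the ones used by the original suite:

<?php
require 'vendor/autoload.php'; // assumption: Twig 1.x autoloaded via Composer

$twig = new Twig_Environment(new Twig_Loader_Array(array()), array('autoescape' => 'html'));

// An "escaper" filter: marked safe for HTML, so the autoescaper leaves its
// output alone (cases 1, 2 and 6 of the first autoescape fixture).
$twig->addFilter(new Twig_SimpleFilter('escape_and_nl2br', function ($env, $value) {
    return nl2br(twig_escape_filter($env, $value, 'html'));
}, array('needs_environment' => true, 'is_safe' => array('html'))));

// Pre-escaped for HTML and declared safe: Twig escapes the argument *before*
// calling the filter and then trusts the result (second autoescape fixture).
$twig->addFilter(new Twig_SimpleFilter('nl2br', 'nl2br',
    array('pre_escape' => 'html', 'is_safe' => array('html'))));

// Keeps whatever safety its input already had (third autoescape fixture).
$twig->addFilter(new Twig_SimpleFilter('preserves_safety', 'strtoupper',
    array('preserves_safety' => array('html'))));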
    ---TEST-- -block_expr ---TEMPLATE-- -{% extends "base.twig" %} - -{% block element -%} - Element: - {{- parent() -}} -{% endblock %} ---TEMPLATE(base.twig)-- -{% spaceless %} -{% block element -%} -
    - {%- if item.children is defined %} - {%- for item in item.children %} - {{- block('element') -}} - {% endfor %} - {%- endif -%} -
    -{%- endblock %} -{% endspaceless %} ---DATA-- -return array( - 'item' => array( - 'children' => array( - null, - null, - ) - ) -) ---EXPECT-- -Element:
    Element:
    Element:
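[Editor's note] Several of these fixtures end in an --EXCEPTION-- section instead of --EXPECT--: they assert that compiling or rendering fails with a specific Twig_Error subclass and message (Twig_Error_Syntax for a block defined twice or "loop" misused in a loop condition, Twig_Error_Runtime for an undefined variable, Twig_Error_Loader for a missing template). A rough sketch of checking one of them, assuming Twig 1.x; the duplicated-block template follows the "block" fixture earlier in this file:

<?php
require 'vendor/autoload.php'; // assumption: Twig 1.x autoloaded via Composer

$twig = new Twig_Environment(new Twig_Loader_Array(array(
    // Defining the same block twice is a compile-time error.
    'index.twig' => "{% block content %}\n{% block content %}{% endblock %}\n{% endblock %}",
)), array('strict_variables' => true, 'cache' => false));

try {
    $twig->render('index.twig', array());
    echo "no exception raised\n";
} catch (Twig_Error_Syntax $e) {
    // Syntax errors surface while compiling, annotated with template name and line.
    echo get_class($e), ': ', $e->getMessage(), "\n";
} catch (Twig_Error_Runtime $e) {
    // Runtime errors (e.g. undefined variables) only surface while rendering.
    echo get_class($e), ': ', $e->getMessage(), "\n";
}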
    ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends standalone ? foo : 'bar.twig' %} - -{% block content %}{{ parent() }}FOO{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %}FOO{% endblock %} ---TEMPLATE(bar.twig)-- -{% block content %}BAR{% endblock %} ---DATA-- -return array('foo' => 'foo.twig', 'standalone' => true) ---EXPECT-- -FOOFOO ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends foo %} - -{% block content %} -FOO -{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %}{% endblock %} ---DATA-- -return array('foo' => 'foo.twig') ---EXPECT-- -FOO ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "foo.twig" %} ---TEMPLATE(foo.twig)-- -{% block content %}FOO{% endblock %} ---DATA-- -return array() ---EXPECT-- -FOO ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends ["foo.twig", "bar.twig"] %} ---TEMPLATE(bar.twig)-- -{% block content %} -foo -{% endblock %} ---DATA-- -return array() ---EXPECT-- -foo ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "layout.twig" %}{% block content %}{{ parent() }}index {% endblock %} ---TEMPLATE(layout.twig)-- -{% extends "base.twig" %}{% block content %}{{ parent() }}layout {% endblock %} ---TEMPLATE(base.twig)-- -{% block content %}base {% endblock %} ---DATA-- -return array() ---EXPECT-- -base layout index ---TEST-- -"block" tag ---TEMPLATE-- -{% block content %} - CONTENT - {%- block subcontent -%} - SUBCONTENT - {%- endblock -%} - ENDCONTENT -{% endblock %} ---TEMPLATE(foo.twig)-- ---DATA-- -return array() ---EXPECT-- -CONTENTSUBCONTENTENDCONTENT ---TEST-- -"block" tag ---TEMPLATE-- -{% extends "foo.twig" %} - -{% block content %} - {% block subcontent %} - {% block subsubcontent %} - SUBSUBCONTENT - {% endblock %} - {% endblock %} -{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %} - {% block subcontent %} - SUBCONTENT - {% endblock %} -{% endblock %} ---DATA-- -return array() ---EXPECT-- -SUBSUBCONTENT ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "layout.twig" %} -{% block inside %}INSIDE{% endblock inside %} ---TEMPLATE(layout.twig)-- -{% extends "base.twig" %} -{% block body %} - {% block inside '' %} -{% endblock body %} ---TEMPLATE(base.twig)-- -{% block body '' %} ---DATA-- -return array() ---EXPECT-- -INSIDE ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends foo ? 
'foo.twig' : 'bar.twig' %} ---TEMPLATE(foo.twig)-- -FOO ---TEMPLATE(bar.twig)-- -BAR ---DATA-- -return array('foo' => true) ---EXPECT-- -FOO ---DATA-- -return array('foo' => false) ---EXPECT-- -BAR ---TEST-- -"extends" tag ---TEMPLATE-- -{% block content %} - {% extends "foo.twig" %} -{% endblock %} ---EXCEPTION-- -Twig_Error_Syntax: Cannot extend from a block in "index.twig" at line 3 ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "base.twig" %} -{% block content %}{% include "included.twig" %}{% endblock %} - -{% block footer %}Footer{% endblock %} ---TEMPLATE(included.twig)-- -{% extends "base.twig" %} -{% block content %}Included Content{% endblock %} ---TEMPLATE(base.twig)-- -{% block content %}Default Content{% endblock %} - -{% block footer %}Default Footer{% endblock %} ---DATA-- -return array() ---EXPECT-- -Included Content -Default Footer -Footer ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "foo.twig" %} - -{% block content %} - {% block inside %} - INSIDE OVERRIDDEN - {% endblock %} - - BEFORE - {{ parent() }} - AFTER -{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %} - BAR -{% endblock %} ---DATA-- -return array() ---EXPECT-- - -INSIDE OVERRIDDEN - - BEFORE - BAR - - AFTER ---TEST-- -"extends" tag ---TEMPLATE-- -{% extends "foo.twig" %} - -{% block content %}{{ parent() }}FOO{{ parent() }}{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %}BAR{% endblock %} ---DATA-- -return array() ---EXPECT-- -BARFOOBAR ---TEST-- -"parent" tag ---TEMPLATE-- -{% use 'foo.twig' %} - -{% block content %} - {{ parent() }} -{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %}BAR{% endblock %} ---DATA-- -return array() ---EXPECT-- -BAR ---TEST-- -"parent" tag ---TEMPLATE-- -{% block content %} - {{ parent() }} -{% endblock %} ---EXCEPTION-- -Twig_Error_Syntax: Calling "parent" on a template that does not extend nor "use" another template is forbidden in "index.twig" at line 3 ---TEST-- -"extends" tag accepts Twig_Template instance ---TEMPLATE-- -{% extends foo %} - -{% block content %} -{{ parent() }}FOO -{% endblock %} ---TEMPLATE(foo.twig)-- -{% block content %}BAR{% endblock %} ---DATA-- -return array('foo' => $twig->loadTemplate('foo.twig')) ---EXPECT-- -BARFOO ---TEST-- -"parent" function ---TEMPLATE-- -{% extends "parent.twig" %} - -{% use "use1.twig" %} -{% use "use2.twig" %} - -{% block content_parent %} - {{ parent() }} -{% endblock %} - -{% block content_use1 %} - {{ parent() }} -{% endblock %} - -{% block content_use2 %} - {{ parent() }} -{% endblock %} - -{% block content %} - {{ block('content_use1_only') }} - {{ block('content_use2_only') }} -{% endblock %} ---TEMPLATE(parent.twig)-- -{% block content_parent 'content_parent' %} -{% block content_use1 'content_parent' %} -{% block content_use2 'content_parent' %} -{% block content '' %} ---TEMPLATE(use1.twig)-- -{% block content_use1 'content_use1' %} -{% block content_use2 'content_use1' %} -{% block content_use1_only 'content_use1_only' %} ---TEMPLATE(use2.twig)-- -{% block content_use2 'content_use2' %} -{% block content_use2_only 'content_use2_only' %} ---DATA-- -return array() ---EXPECT-- - content_parent - content_use1 - content_use2 - content_use1_only - content_use2_only ---TEST-- -"macro" tag ---TEMPLATE-- -{% import _self as macros %} - -{{ macros.input('username') }} -{{ macros.input('password', null, 'password', 1) }} - -{% macro input(name, value, type, size) %} - -{% endmacro %} ---DATA-- -return array() ---EXPECT-- - - - ---TEST-- -"macro" tag supports name for endmacro ---TEMPLATE-- -{% 
import _self as macros %} - -{{ macros.foo() }} -{{ macros.bar() }} - -{% macro foo() %}foo{% endmacro %} -{% macro bar() %}bar{% endmacro bar %} ---DATA-- -return array() ---EXPECT-- -foo -bar - ---TEST-- -"macro" tag ---TEMPLATE-- -{% import 'forms.twig' as forms %} - -{{ forms.input('username') }} -{{ forms.input('password', null, 'password', 1) }} ---TEMPLATE(forms.twig)-- -{% macro input(name, value, type, size) %} - -{% endmacro %} ---DATA-- -return array() ---EXPECT-- - - - ---TEST-- -"macro" tag ---TEMPLATE-- -{% from 'forms.twig' import foo %} -{% from 'forms.twig' import foo as foobar, bar %} - -{{ foo('foo') }} -{{ foobar('foo') }} -{{ bar('foo') }} ---TEMPLATE(forms.twig)-- -{% macro foo(name) %}foo{{ name }}{% endmacro %} -{% macro bar(name) %}bar{{ name }}{% endmacro %} ---DATA-- -return array() ---EXPECT-- -foofoo -foofoo -barfoo ---TEST-- -"macro" tag ---TEMPLATE-- -{% from 'forms.twig' import foo %} - -{{ foo('foo') }} -{{ foo() }} ---TEMPLATE(forms.twig)-- -{% macro foo(name) %}{{ name|default('foo') }}{{ global }}{% endmacro %} ---DATA-- -return array() ---EXPECT-- -fooglobal -fooglobal ---TEST-- -"macro" tag ---TEMPLATE-- -{% import _self as forms %} - -{{ forms.input('username') }} -{{ forms.input('password', null, 'password', 1) }} - -{% macro input(name, value, type, size) %} - -{% endmacro %} ---DATA-- -return array() ---EXPECT-- - - - ---TEST-- -"raw" tag ---TEMPLATE-- -{% raw %} -{{ foo }} -{% endraw %} ---DATA-- -return array() ---EXPECT-- -{{ foo }} ---TEST-- -"raw" tag ---TEMPLATE-- -{% raw %} -{{ foo }} -{% endverbatim %} ---DATA-- -return array() ---EXCEPTION-- -Twig_Error_Syntax: Unexpected end of file: Unclosed "raw" block in "index.twig" at line 2 ---TEST-- -"raw" tag ---TEMPLATE-- -1*** - -{%- raw %} - {{ 'bla' }} -{% endraw %} - -1*** -2*** - -{%- raw -%} - {{ 'bla' }} -{% endraw %} - -2*** -3*** - -{%- raw -%} - {{ 'bla' }} -{% endraw -%} - -3*** -4*** - -{%- raw -%} - {{ 'bla' }} -{%- endraw %} - -4*** -5*** - -{%- raw -%} - {{ 'bla' }} -{%- endraw -%} - -5*** ---DATA-- -return array() ---EXPECT-- -1*** - {{ 'bla' }} - - -1*** -2***{{ 'bla' }} - - -2*** -3***{{ 'bla' }} -3*** -4***{{ 'bla' }} - -4*** -5***{{ 'bla' }}5*** ---TEST-- -sandbox tag ---TEMPLATE-- -{%- sandbox %} - {%- include "foo.twig" %} - a -{%- endsandbox %} ---TEMPLATE(foo.twig)-- -foo ---EXCEPTION-- -Twig_Error_Syntax: Only "include" tags are allowed within a "sandbox" section in "index.twig" at line 4 ---TEST-- -sandbox tag ---TEMPLATE-- -{%- sandbox %} - {%- include "foo.twig" %} - - {% if 1 %} - {%- include "foo.twig" %} - {% endif %} -{%- endsandbox %} ---TEMPLATE(foo.twig)-- -foo ---EXCEPTION-- -Twig_Error_Syntax: Only "include" tags are allowed within a "sandbox" section in "index.twig" at line 5 ---TEST-- -sandbox tag ---TEMPLATE-- -{%- sandbox %} - {%- include "foo.twig" %} -{%- endsandbox %} - -{%- sandbox %} - {%- include "foo.twig" %} - {%- include "foo.twig" %} -{%- endsandbox %} - -{%- sandbox %}{% include "foo.twig" %}{% endsandbox %} ---TEMPLATE(foo.twig)-- -foo ---DATA-- -return array() ---EXPECT-- -foo -foo -foo -foo ---TEST-- -"set" tag ---TEMPLATE-- -{% set foo = 'foo' %} -{% set bar = 'foo
<br />' %} - -{{ foo }} -{{ bar }} - -{% set foo, bar = 'foo', 'bar' %} - -{{ foo }}{{ bar }} ---DATA-- -return array() ---EXPECT-- -foo -foo<br /> - - -foobar ---TEST-- -"set" tag block empty capture ---TEMPLATE-- -{% set foo %}{% endset %} - -{% if foo %}FAIL{% endif %} ---DATA-- -return array() ---EXPECT-- ---TEST-- -"set" tag block capture ---TEMPLATE-- -{% set foo %}f
<br />o<br />
    o{% endset %} - -{{ foo }} ---DATA-- -return array() ---EXPECT-- -f
<br />o<br />
    o ---TEST-- -"set" tag ---TEMPLATE-- -{% set foo, bar = 'foo' ~ 'bar', 'bar' ~ 'foo' %} - -{{ foo }} -{{ bar }} ---DATA-- -return array() ---EXPECT-- -foobar -barfoo ---TEST-- -"spaceless" tag removes whites between HTML tags ---TEMPLATE-- -{% spaceless %} - -
    foo
    - -{% endspaceless %} ---DATA-- -return array() ---EXPECT-- -
    foo
    ---TEST-- -"§" custom tag ---TEMPLATE-- -{% § %} ---DATA-- -return array() ---EXPECT-- -§ ---TEST-- -Whitespace trimming on tags. ---TEMPLATE-- -{{ 5 * '{#-'|length }} -{{ '{{-'|length * 5 + '{%-'|length }} - -Trim on control tag: -{% for i in range(1, 9) -%} - {{ i }} -{%- endfor %} - - -Trim on output tag: -{% for i in range(1, 9) %} - {{- i -}} -{% endfor %} - - -Trim comments: - -{#- Invisible -#} - -After the comment. - -Trim leading space: -{% if leading %} - - {{- leading }} -{% endif %} - -{%- if leading %} - {{- leading }} - -{%- endif %} - - -Trim trailing space: -{% if trailing -%} - {{ trailing -}} - -{% endif -%} - -Combined: - -{%- if both -%} -
      -
    • {{- both -}}
    • -
    - -{%- endif -%} - -end ---DATA-- -return array('leading' => 'leading space', 'trailing' => 'trailing space', 'both' => 'both') ---EXPECT-- -15 -18 - -Trim on control tag: -123456789 - -Trim on output tag: -123456789 - -Trim comments:After the comment. - -Trim leading space: -leading space -leading space - -Trim trailing space: -trailing spaceCombined:
      -
    • both
    • -
    end ---TEST-- -"use" tag ---TEMPLATE-- -{% use "blocks.twig" with content as foo %} - -{{ block('foo') }} ---TEMPLATE(blocks.twig)-- -{% block content 'foo' %} ---DATA-- -return array() ---EXPECT-- -foo ---TEST-- -"use" tag ---TEMPLATE-- -{% use "blocks.twig" %} - -{{ block('content') }} ---TEMPLATE(blocks.twig)-- -{% block content 'foo' %} ---DATA-- -return array() ---EXPECT-- -foo ---TEST-- -"use" tag ---TEMPLATE-- -{% use "foo.twig" %} ---TEMPLATE(foo.twig)-- -{% use "bar.twig" %} ---TEMPLATE(bar.twig)-- ---DATA-- -return array() ---EXPECT-- ---TEST-- -"use" tag ---TEMPLATE-- -{% use "foo.twig" %} - -{{ block('content') }} -{{ block('foo') }} -{{ block('bar') }} ---TEMPLATE(foo.twig)-- -{% use "bar.twig" %} - -{% block content 'foo' %} -{% block foo 'foo' %} ---TEMPLATE(bar.twig)-- -{% block content 'bar' %} -{% block bar 'bar' %} ---DATA-- -return array() ---EXPECT-- -foo -foo -bar ---TEST-- -"use" tag ---TEMPLATE-- -{% use "ancestor.twig" %} -{% use "parent.twig" %} - -{{ block('container') }} ---TEMPLATE(parent.twig)-- -{% block sub_container %} -
    overriden sub_container
    -{% endblock %} ---TEMPLATE(ancestor.twig)-- -{% block container %} -
    {{ block('sub_container') }}
    -{% endblock %} - -{% block sub_container %} -
    sub_container
    -{% endblock %} ---DATA-- -return array() ---EXPECT-- -
    overriden sub_container
    -
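[Editor's note] A few fixtures above also carry a --CONFIG-- section (for example `return array('strict_variables' => false)` in some "for" and "defined" tests); those values are simply Twig_Environment constructor options under which the template is rendered again. A minimal sketch, assuming Twig 1.x:

<?php
require 'vendor/autoload.php'; // assumption: Twig 1.x autoloaded via Composer

$loader = new Twig_Loader_Array(array('index.twig' => '{{ missing }}WORKS'));

// --CONFIG-- maps onto environment options: with strict_variables disabled an
// unknown variable evaluates to null instead of raising Twig_Error_Runtime.
$twig = new Twig_Environment($loader, array(
    'strict_variables' => false,
    'cache'            => false,
));

echo $twig->render('index.twig'), "\n"; // prints "WORKS"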
    ---TEST-- -"use" tag ---TEMPLATE-- -{% use "parent.twig" %} - -{{ block('container') }} ---TEMPLATE(parent.twig)-- -{% use "ancestor.twig" %} - -{% block sub_container %} -
    overriden sub_container
    -{% endblock %} ---TEMPLATE(ancestor.twig)-- -{% block container %} -
    {{ block('sub_container') }}
    -{% endblock %} - -{% block sub_container %} -
    sub_container
    -{% endblock %} ---DATA-- -return array() ---EXPECT-- -
    overriden sub_container
    -
    ---TEST-- -"use" tag ---TEMPLATE-- -{% use "foo.twig" with content as foo_content %} -{% use "bar.twig" %} - -{{ block('content') }} -{{ block('foo') }} -{{ block('bar') }} -{{ block('foo_content') }} ---TEMPLATE(foo.twig)-- -{% block content 'foo' %} -{% block foo 'foo' %} ---TEMPLATE(bar.twig)-- -{% block content 'bar' %} -{% block bar 'bar' %} ---DATA-- -return array() ---EXPECT-- -bar -foo -bar -foo ---TEST-- -"use" tag ---TEMPLATE-- -{% use "foo.twig" %} -{% use "bar.twig" %} - -{{ block('content') }} -{{ block('foo') }} -{{ block('bar') }} ---TEMPLATE(foo.twig)-- -{% block content 'foo' %} -{% block foo 'foo' %} ---TEMPLATE(bar.twig)-- -{% block content 'bar' %} -{% block bar 'bar' %} ---DATA-- -return array() ---EXPECT-- -bar -foo -bar ---TEST-- -"use" tag ---TEMPLATE-- -{% use 'file2.html.twig'%} -{% block foobar %} - {{- parent() -}} - Content of block (second override) -{% endblock foobar %} ---TEMPLATE(file2.html.twig)-- -{% use 'file1.html.twig' %} -{% block foobar %} - {{- parent() -}} - Content of block (first override) -{% endblock foobar %} ---TEMPLATE(file1.html.twig)-- -{% block foobar -%} - Content of block -{% endblock foobar %} ---DATA-- -return array() ---EXPECT-- -Content of block -Content of block (first override) -Content of block (second override) ---TEST-- -"use" tag ---TEMPLATE-- -{% use 'file2.html.twig' %} -{% use 'file1.html.twig' with foo %} -{% block foo %} - {{- parent() -}} - Content of foo (second override) -{% endblock foo %} -{% block bar %} - {{- parent() -}} - Content of bar (second override) -{% endblock bar %} ---TEMPLATE(file2.html.twig)-- -{% use 'file1.html.twig' %} -{% block foo %} - {{- parent() -}} - Content of foo (first override) -{% endblock foo %} -{% block bar %} - {{- parent() -}} - Content of bar (first override) -{% endblock bar %} ---TEMPLATE(file1.html.twig)-- -{% block foo -%} - Content of foo -{% endblock foo %} -{% block bar -%} - Content of bar -{% endblock bar %} ---DATA-- -return array() ---EXPECT-- -Content of foo -Content of foo (first override) -Content of foo (second override) -Content of bar -Content of bar (second override) ---TEST-- -"use" tag ---TEMPLATE-- -{% use 'file2.html.twig' with foobar as base_base_foobar %} -{% block foobar %} - {{- block('base_base_foobar') -}} - Content of block (second override) -{% endblock foobar %} ---TEMPLATE(file2.html.twig)-- -{% use 'file1.html.twig' with foobar as base_foobar %} -{% block foobar %} - {{- block('base_foobar') -}} - Content of block (first override) -{% endblock foobar %} ---TEMPLATE(file1.html.twig)-- -{% block foobar -%} - Content of block -{% endblock foobar %} ---DATA-- -return array() ---EXPECT-- -Content of block -Content of block (first override) -Content of block (second override) ---TEST-- -"verbatim" tag ---TEMPLATE-- -{% verbatim %} -{{ foo }} -{% endverbatim %} ---DATA-- -return array() ---EXPECT-- -{{ foo }} ---TEST-- -"verbatim" tag ---TEMPLATE-- -{% verbatim %} -{{ foo }} -{% endraw %} ---DATA-- -return array() ---EXCEPTION-- -Twig_Error_Syntax: Unexpected end of file: Unclosed "verbatim" block in "index.twig" at line 2 ---TEST-- -"verbatim" tag ---TEMPLATE-- -1*** - -{%- verbatim %} - {{ 'bla' }} -{% endverbatim %} - -1*** -2*** - -{%- verbatim -%} - {{ 'bla' }} -{% endverbatim %} - -2*** -3*** - -{%- verbatim -%} - {{ 'bla' }} -{% endverbatim -%} - -3*** -4*** - -{%- verbatim -%} - {{ 'bla' }} -{%- endverbatim %} - -4*** -5*** - -{%- verbatim -%} - {{ 'bla' }} -{%- endverbatim -%} - -5*** ---DATA-- -return array() ---EXPECT-- -1*** - {{ 'bla' }} - 
- -1*** -2***{{ 'bla' }} - - -2*** -3***{{ 'bla' }} -3*** -4***{{ 'bla' }} - -4*** -5***{{ 'bla' }}5*** ---TEST-- -array index test ---TEMPLATE-- -{% for key, value in days %} -{{ key }} -{% endfor %} ---DATA-- -return array('days' => array( - 1 => array('money' => 9), - 2 => array('money' => 21), - 3 => array('money' => 38), - 4 => array('money' => 6), - 18 => array('money' => 6), - 19 => array('money' => 3), - 31 => array('money' => 11), -)); ---EXPECT-- -1 -2 -3 -4 -18 -19 -31 ---TEST-- -"const" test ---TEMPLATE-- -{{ 8 is constant('E_NOTICE') ? 'ok' : 'no' }} -{{ 'bar' is constant('TwigTestFoo::BAR_NAME') ? 'ok' : 'no' }} -{{ value is constant('TwigTestFoo::BAR_NAME') ? 'ok' : 'no' }} -{{ 2 is constant('ARRAY_AS_PROPS', object) ? 'ok' : 'no' }} ---DATA-- -return array('value' => 'bar', 'object' => new ArrayObject(array('hi'))); ---EXPECT-- -ok -ok -ok -ok--TEST-- -"defined" test ---TEMPLATE-- -{{ definedVar is defined ? 'ok' : 'ko' }} -{{ definedVar is not defined ? 'ko' : 'ok' }} -{{ undefinedVar is defined ? 'ko' : 'ok' }} -{{ undefinedVar is not defined ? 'ok' : 'ko' }} -{{ zeroVar is defined ? 'ok' : 'ko' }} -{{ nullVar is defined ? 'ok' : 'ko' }} -{{ nested.definedVar is defined ? 'ok' : 'ko' }} -{{ nested['definedVar'] is defined ? 'ok' : 'ko' }} -{{ nested.definedVar is not defined ? 'ko' : 'ok' }} -{{ nested.undefinedVar is defined ? 'ko' : 'ok' }} -{{ nested['undefinedVar'] is defined ? 'ko' : 'ok' }} -{{ nested.undefinedVar is not defined ? 'ok' : 'ko' }} -{{ nested.zeroVar is defined ? 'ok' : 'ko' }} -{{ nested.nullVar is defined ? 'ok' : 'ko' }} -{{ nested.definedArray.0 is defined ? 'ok' : 'ko' }} -{{ nested['definedArray'][0] is defined ? 'ok' : 'ko' }} -{{ object.foo is defined ? 'ok' : 'ko' }} -{{ object.undefinedMethod is defined ? 'ko' : 'ok' }} -{{ object.getFoo() is defined ? 'ok' : 'ko' }} -{{ object.getFoo('a') is defined ? 'ok' : 'ko' }} -{{ object.undefinedMethod() is defined ? 'ko' : 'ok' }} -{{ object.undefinedMethod('a') is defined ? 'ko' : 'ok' }} -{{ object.self.foo is defined ? 'ok' : 'ko' }} -{{ object.self.undefinedMethod is defined ? 'ko' : 'ok' }} -{{ object.undefinedMethod.self is defined ? 'ko' : 'ok' }} ---DATA-- -return array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'nullVar' => null, - 'nested' => array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'nullVar' => null, - 'definedArray' => array(0), - ), - 'object' => new TwigTestFoo(), -); ---EXPECT-- -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok ---DATA-- -return array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'nullVar' => null, - 'nested' => array( - 'definedVar' => 'defined', - 'zeroVar' => 0, - 'nullVar' => null, - 'definedArray' => array(0), - ), - 'object' => new TwigTestFoo(), -); ---CONFIG-- -return array('strict_variables' => false) ---EXPECT-- -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok -ok ---TEST-- -"empty" test ---TEMPLATE-- -{{ foo is empty ? 'ok' : 'ko' }} -{{ bar is empty ? 'ok' : 'ko' }} -{{ foobar is empty ? 'ok' : 'ko' }} -{{ array is empty ? 'ok' : 'ko' }} -{{ zero is empty ? 'ok' : 'ko' }} -{{ string is empty ? 'ok' : 'ko' }} -{{ countable_empty is empty ? 'ok' : 'ko' }} -{{ countable_not_empty is empty ? 'ok' : 'ko' }} -{{ markup_empty is empty ? 'ok' : 'ko' }} -{{ markup_not_empty is empty ? 
'ok' : 'ko' }} ---DATA-- - -class CountableStub implements Countable -{ - private $items; - - public function __construct(array $items) - { - $this->items = $items; - } - - public function count() - { - return count($this->items); - } -} -return array( - 'foo' => '', 'bar' => null, 'foobar' => false, 'array' => array(), 'zero' => 0, 'string' => '0', - 'countable_empty' => new CountableStub(array()), 'countable_not_empty' => new CountableStub(array(1, 2)), - 'markup_empty' => new Twig_Markup('', 'UTF-8'), 'markup_not_empty' => new Twig_Markup('test', 'UTF-8'), -); ---EXPECT-- -ok -ok -ok -ok -ko -ko -ok -ko -ok -ko ---TEST-- -"even" test ---TEMPLATE-- -{{ 1 is even ? 'ko' : 'ok' }} -{{ 2 is even ? 'ok' : 'ko' }} -{{ 1 is not even ? 'ok' : 'ko' }} -{{ 2 is not even ? 'ko' : 'ok' }} ---DATA-- -return array() ---EXPECT-- -ok -ok -ok -ok ---TEST-- -Twig supports the in operator ---TEMPLATE-- -{% if bar in foo %} -TRUE -{% endif %} -{% if not (bar in foo) %} -{% else %} -TRUE -{% endif %} -{% if bar not in foo %} -{% else %} -TRUE -{% endif %} -{% if 'a' in bar %} -TRUE -{% endif %} -{% if 'c' not in bar %} -TRUE -{% endif %} -{% if '' not in bar %} -TRUE -{% endif %} -{% if '' in '' %} -TRUE -{% endif %} -{% if '0' not in '' %} -TRUE -{% endif %} -{% if 'a' not in '0' %} -TRUE -{% endif %} -{% if '0' in '0' %} -TRUE -{% endif %} -{{ false in [0, 1] ? 'TRUE' : 'FALSE' }} -{{ true in [0, 1] ? 'TRUE' : 'FALSE' }} -{{ '0' in [0, 1] ? 'TRUE' : 'FALSE' }} -{{ '' in [0, 1] ? 'TRUE' : 'FALSE' }} -{{ 0 in ['', 1] ? 'TRUE' : 'FALSE' }} -{{ '' in 'foo' ? 'TRUE' : 'FALSE' }} -{{ 0 in 'foo' ? 'TRUE' : 'FALSE' }} -{{ false in 'foo' ? 'TRUE' : 'FALSE' }} -{{ true in '100' ? 'TRUE' : 'FALSE' }} -{{ [] in 'Array' ? 'TRUE' : 'FALSE' }} -{{ [] in [true, false] ? 'TRUE' : 'FALSE' }} -{{ [] in [true, ''] ? 'TRUE' : 'FALSE' }} -{{ [] in [true, []] ? 'TRUE' : 'FALSE' }} -{{ dir_object in 'foo'~dir_name ? 'TRUE' : 'FALSE' }} -{{ 5 in 125 ? 'TRUE' : 'FALSE' }} ---DATA-- -return array('bar' => 'bar', 'foo' => array('bar' => 'bar'), 'dir_name' => dirname(__FILE__), 'dir_object' => new SplFileInfo(dirname(__FILE__))) ---EXPECT-- -TRUE -TRUE -TRUE -TRUE -TRUE -TRUE -TRUE -TRUE -TRUE -FALSE -FALSE -FALSE -FALSE -FALSE -TRUE -FALSE -FALSE -FALSE -FALSE -FALSE -FALSE -TRUE -FALSE -FALSE ---TEST-- -Twig supports the in operator when using objects ---TEMPLATE-- -{% if object in object_list %} -TRUE -{% endif %} ---DATA-- -$foo = new TwigTestFoo(); -$foo1 = new TwigTestFoo(); - -$foo->position = $foo1; -$foo1->position = $foo; - -return array( - 'object' => $foo, - 'object_list' => array($foo1, $foo), -); ---EXPECT-- -TRUE ---TEST-- -"iterable" test ---TEMPLATE-- -{{ foo is iterable ? 'ok' : 'ko' }} -{{ traversable is iterable ? 'ok' : 'ko' }} -{{ obj is iterable ? 'ok' : 'ko' }} -{{ val is iterable ? 'ok' : 'ko' }} ---DATA-- -return array( - 'foo' => array(), - 'traversable' => new ArrayIterator(array()), - 'obj' => new stdClass(), - 'val' => 'test', -); ---EXPECT-- -ok -ok -ko -ko--TEST-- -"odd" test ---TEMPLATE-- -{{ 1 is odd ? 'ok' : 'ko' }} -{{ 2 is odd ? 'ko' : 'ok' }} ---DATA-- -return array() ---EXPECT-- -ok -ok diff --git a/tests/examplefiles/type.lisp b/tests/examplefiles/type.lisp deleted file mode 100644 index c02c29df..00000000 --- a/tests/examplefiles/type.lisp +++ /dev/null @@ -1,1218 +0,0 @@ -;;;; TYPEP und Verwandtes -;;;; Michael Stoll, 21. 10. 
1988 -;;;; Bruno Haible, 10.6.1989 -;;;; Sam Steingold 2000-2005 - -;;; Datenstrukturen für TYPEP: -;;; - Ein Type-Specifier-Symbol hat auf seiner Propertyliste unter dem -;;; Indikator SYS::TYPE-SYMBOL eine Funktion von einem Argument, die -;;; testet, ob ein Objekt vom richtigen Typ ist. -;;; - Ein Symbol, das eine Type-Specifier-Liste beginnen kann, hat auf seiner -;;; Propertyliste unter dem Indikator SYS::TYPE-LIST eine Funktion von -;;; einem Argument für das zu testende Objekt und zusätzlichen Argumenten -;;; für die Listenelemente. -;;; - Ein Symbol, das als Typmacro definiert wurde, hat auf seiner Property- -;;; liste unter dem Indikator SYSTEM::DEFTYPE-EXPANDER den zugehörigen -;;; Expander: eine Funktion, die den zu expandierenden Type-Specifier (eine -;;; mindestens einelementige Liste) als Argument bekommt. - -(in-package "EXT") -(export '(type-expand)) -(in-package "SYSTEM") - -; vorläufig, solange bis clos.lisp geladen wird: -(eval-when (eval) - (predefun clos::built-in-class-p (object) (declare (ignore object)) nil)) -(unless (fboundp 'clos::class-name) - (defun clos::class-name (c) (declare (ignore c)) nil) -) - -(defun typespec-error (fun type) - (error-of-type 'error - (TEXT "~S: invalid type specification ~S") - fun type -) ) - -;; ============================================================================ - -;; return the CLOS class named by TYPESPEC or NIL -(defun clos-class (typespec) - (let ((cc (get typespec 'CLOS::CLOSCLASS))) - (when (and cc (clos::defined-class-p cc) (eq (clos:class-name cc) typespec)) - cc))) - -;;; TYPEP, CLTL S. 72, S. 42-51 -(defun typep (x y &optional env &aux f) ; x = Objekt, y = Typ - (declare (ignore env)) - (setq y (expand-deftype y)) - (cond - ((symbolp y) - (cond ((setq f (get y 'TYPE-SYMBOL)) (funcall f x)) - ((setq f (get y 'TYPE-LIST)) (funcall f x)) - ((setq f (get y 'DEFSTRUCT-DESCRIPTION)) (ds-typep x y f)) - ((setq f (clos-class y)) - ; It's not worth handling structure classes specially here. - (clos::typep-class x f)) - (t (typespec-error 'typep y)) - ) ) - ((and (consp y) (symbolp (first y))) - (cond - ((and (eq (first y) 'SATISFIES) (eql (length y) 2)) - (unless (symbolp (second y)) - (error-of-type 'error - (TEXT "~S: argument to SATISFIES must be a symbol: ~S") - 'typep (second y) - ) ) - (if (funcall (symbol-function (second y)) x) t nil) - ) - ((eq (first y) 'MEMBER) - (if (member x (rest y)) t nil) - ) - ((and (eq (first y) 'EQL) (eql (length y) 2)) - (eql x (second y)) - ) - ((and (eq (first y) 'NOT) (eql (length y) 2)) - (not (typep x (second y))) - ) - ((eq (first y) 'AND) - (dolist (type (rest y) t) - (unless (typep x type) (return nil)) - ) ) - ((eq (first y) 'OR) - (dolist (type (rest y) nil) - (when (typep x type) (return t)) - ) ) - ((setq f (get (first y) 'TYPE-LIST)) (apply f x (rest y))) - (t (typespec-error 'typep y)) - ) ) - ((clos::defined-class-p y) (clos::typep-class x y)) - ((clos::eql-specializer-p y) (eql x (clos::eql-specializer-singleton y))) - ((encodingp y) (charset-typep x y)) - (t (typespec-error 'typep y)) -) ) - -;; ---------------------------------------------------------------------------- - -;; UPGRADED-ARRAY-ELEMENT-TYPE is a lattice homomorphism, see -;; ANSI CL 15.1.2.1. 
-(defun upgraded-array-element-type (type &optional environment) - (declare (ignore environment)) - ;; see array.d - (case type - ((BIT) 'BIT) - ((CHARACTER) 'CHARACTER) - ((T) 'T) - ((NIL) 'NIL) - (t (if (subtypep type 'NIL) - 'NIL - (multiple-value-bind (low high) (sys::subtype-integer type) - ; Es gilt (or (null low) (subtypep type `(INTEGER ,low ,high))) - (if (and (integerp low) (not (minusp low)) (integerp high)) - (let ((l (integer-length high))) - ; Es gilt (subtypep type `(UNSIGNED-BYTE ,l)) - (cond ((<= l 1) 'BIT) - ((<= l 2) '(UNSIGNED-BYTE 2)) - ((<= l 4) '(UNSIGNED-BYTE 4)) - ((<= l 8) '(UNSIGNED-BYTE 8)) - ((<= l 16) '(UNSIGNED-BYTE 16)) - ((<= l 32) '(UNSIGNED-BYTE 32)) - (t 'T))) - (if (subtypep type 'CHARACTER) - 'CHARACTER - 'T))))))) - -;; ---------------------------------------------------------------------------- - -;; UPGRADED-COMPLEX-PART-TYPE is a lattice homomorphism, see -;; HyperSpec/Body/fun_complex.html and HyperSpec/Body/syscla_complex.html, -;; and an idempotent. Therefore -;; (subtypep (upgraded-complex-part-type T1) (upgraded-complex-part-type T2)) -;; is equivalent to -;; (subtypep T1 (upgraded-complex-part-type T2)) -;; (Proof: Let U T be an abbreviation for (upgraded-complex-part-type T). -;; If U T1 <= U T2, then T1 <= U T1 <= U T2. -;; If T1 <= U T2, then by homomorphism U T1 <= U U T2 = U T2.) -;; -;; For _any_ CL implementation, you could define -;; (defun upgraded-complex-part-type (type) 'REAL) -;; Likewise for _any_ CL implementation, you could define -;; (defun upgraded-complex-part-type (type) type) -;; or - again for _any_ CL implementation: -;; (defun upgraded-complex-part-type (type) -;; (cond ((subtypep type 'NIL) 'NIL) -;; ((subtypep type 'SHORT-FLOAT) 'SHORT-FLOAT) -;; ((subtypep type 'SINGLE-FLOAT) 'SINGLE-FLOAT) -;; ((subtypep type 'DOUBLE-FLOAT) 'DOUBLE-FLOAT) -;; ((subtypep type 'LONG-FLOAT) 'LONG-FLOAT) -;; ((subtypep type 'RATIONAL) 'RATIONAL) -;; ((subtypep type 'REAL) 'REAL) -;; (t (error ...)))) -;; The reason is that a complex number is immutable: no setters for the -;; realpart and imagpart exist. -;; -;; We choose the second implementation because it allows the most precise -;; type inference. -(defun upgraded-complex-part-type (type &optional environment) - (declare (ignore environment)) - (if (subtypep type 'REAL) - type - (error-of-type 'error - (TEXT "~S: type ~S is not a subtype of ~S") - 'upgraded-complex-part-type type 'real))) - -;; ---------------------------------------------------------------------------- - -;; Macros for defining the various built-in "atomic type specifier"s and -;; "compound type specifier"s. The following macros add information for both -;; the TYPEP function above and the c-TYPEP in the compiler. - -; Alist symbol -> funname, used by the compiler. -(defparameter c-typep-alist1 '()) -; Alist symbol -> lambdabody, used by the compiler. -(defparameter c-typep-alist2 '()) -; Alist symbol -> expander function, used by the compiler. -(defparameter c-typep-alist3 '()) - -; (def-atomic-type symbol function-name) -; defines an atomic type. The function-name designates a function taking one -; argument and returning a generalized boolean value. It can be either a -; symbol or a lambda expression. 
-(defmacro def-atomic-type (symbol funname) - (let ((lambdap (and (consp funname) (eq (car funname) 'LAMBDA)))) - `(PROGN - (SETF (GET ',symbol 'TYPE-SYMBOL) - ,(if lambdap - `(FUNCTION ,(concat-pnames "TYPE-SYMBOL-" symbol) ,funname) - `(FUNCTION ,funname) - ) - ) - ,(if lambdap - `(SETQ C-TYPEP-ALIST2 - (NCONC C-TYPEP-ALIST2 (LIST (CONS ',symbol ',(cdr funname)))) - ) - `(SETQ C-TYPEP-ALIST1 - (NCONC C-TYPEP-ALIST1 (LIST (CONS ',symbol ',funname))) - ) - ) - ',symbol - ) -) ) - -; (def-compound-type symbol lambda-list (x) check-form typep-form c-typep-form) -; defines a compound type. The lambda-list is of the form (&optional ...) -; where the arguments come from the CDR of the type specifier. -; For typep-form, x is an object. -; For c-typep-form, x is a multiply evaluatable form (actually a gensym). -; check-form is a form performing error checking, may call `error'. -; typep-form should return a generalized boolean value. -; c-typep-form should produce a form returning a generalized boolean value. -(defmacro def-compound-type (symbol lambdalist (var) check-form typep-form c-typep-form) - `(PROGN - (SETF (GET ',symbol 'TYPE-LIST) - (FUNCTION ,(concat-pnames "TYPE-LIST-" symbol) - (LAMBDA (,var ,@lambdalist) - ,@(if check-form - `((MACROLET ((ERROR (&REST ERROR-ARGS) - (LIST* 'ERROR-OF-TYPE ''ERROR ERROR-ARGS) - )) - ,check-form - )) - ) - ,typep-form - ) ) ) - (SETQ C-TYPEP-ALIST3 - (NCONC C-TYPEP-ALIST3 - (LIST (CONS ',symbol - #'(LAMBDA (,var ,@lambdalist &REST ILLEGAL-ARGS) - (DECLARE (IGNORE ILLEGAL-ARGS)) - ,@(if check-form - `((MACROLET ((ERROR (&REST ERROR-ARGS) - (LIST 'PROGN - (LIST* 'C-WARN ERROR-ARGS) - '(THROW 'C-TYPEP NIL) - )) ) - ,check-form - )) - ) - ,c-typep-form - ) - ) ) ) ) - ',symbol - ) -) - -; CLtL1 p. 43 -(def-atomic-type ARRAY arrayp) -(def-atomic-type ATOM atom) -(def-atomic-type BASE-CHAR - #+BASE-CHAR=CHARACTER - characterp - #-BASE-CHAR=CHARACTER - (lambda (x) (and (characterp x) (base-char-p x))) -) -(def-atomic-type BASE-STRING - (lambda (x) - (and (stringp x) - (eq (array-element-type x) - #+BASE-CHAR=CHARACTER 'CHARACTER #-BASE-CHAR=CHARACTER 'BASE-CHAR -) ) ) ) -(def-atomic-type BIGNUM - (lambda (x) (and (integerp x) (not (fixnump x)))) -) -(def-atomic-type BIT - (lambda (x) (or (eql x 0) (eql x 1))) -) -(def-atomic-type BIT-VECTOR bit-vector-p) -(def-atomic-type BOOLEAN - (lambda (x) (or (eq x 'nil) (eq x 't))) -) -(def-atomic-type CHARACTER characterp) -(def-atomic-type COMPILED-FUNCTION compiled-function-p) -(def-atomic-type COMPLEX complexp) -(def-atomic-type CONS consp) -(def-atomic-type DOUBLE-FLOAT double-float-p) -(def-atomic-type ENCODING encodingp) -(def-atomic-type EXTENDED-CHAR - #+BASE-CHAR=CHARACTER - (lambda (x) (declare (ignore x)) nil) - #-BASE-CHAR=CHARACTER - (lambda (x) (and (characterp x) (not (base-char-p x)))) -) -(def-atomic-type FIXNUM fixnump) -(def-atomic-type FLOAT floatp) -(def-atomic-type FUNCTION functionp) -(def-atomic-type HASH-TABLE hash-table-p) -(def-atomic-type INTEGER integerp) -(def-atomic-type KEYWORD keywordp) -(def-atomic-type LIST listp) -#+LOGICAL-PATHNAMES -(def-atomic-type LOGICAL-PATHNAME logical-pathname-p) -(def-atomic-type LONG-FLOAT long-float-p) -(def-atomic-type NIL - (lambda (x) (declare (ignore x)) nil) -) -(def-atomic-type NULL null) -(def-atomic-type NUMBER numberp) -(def-atomic-type PACKAGE packagep) -(def-atomic-type PATHNAME pathnamep) -(def-atomic-type RANDOM-STATE random-state-p) -(def-atomic-type RATIO - (lambda (x) (and (rationalp x) (not (integerp x)))) -) -(def-atomic-type RATIONAL 
rationalp) -(def-atomic-type READTABLE readtablep) -(def-atomic-type REAL realp) -(def-atomic-type SEQUENCE sequencep) -(def-atomic-type SHORT-FLOAT short-float-p) -(def-atomic-type SIMPLE-ARRAY simple-array-p) -(def-atomic-type SIMPLE-BASE-STRING - (lambda (x) - (and (simple-string-p x) - (eq (array-element-type x) - #+BASE-CHAR=CHARACTER 'CHARACTER #-BASE-CHAR=CHARACTER 'BASE-CHAR -) ) ) ) -(def-atomic-type SIMPLE-BIT-VECTOR simple-bit-vector-p) -(def-atomic-type SIMPLE-STRING simple-string-p) -(def-atomic-type SIMPLE-VECTOR simple-vector-p) -(def-atomic-type SINGLE-FLOAT single-float-p) -(defun %standard-char-p (x) (and (characterp x) (standard-char-p x))) ; ABI -(def-atomic-type STANDARD-CHAR %standard-char-p) -(def-atomic-type CLOS:STANDARD-OBJECT clos::std-instance-p) -(def-atomic-type STREAM streamp) -(def-atomic-type FILE-STREAM file-stream-p) -(def-atomic-type SYNONYM-STREAM synonym-stream-p) -(def-atomic-type BROADCAST-STREAM broadcast-stream-p) -(def-atomic-type CONCATENATED-STREAM concatenated-stream-p) -(def-atomic-type TWO-WAY-STREAM two-way-stream-p) -(def-atomic-type ECHO-STREAM echo-stream-p) -(def-atomic-type STRING-STREAM string-stream-p) -(def-atomic-type STRING stringp) -(def-atomic-type STRING-CHAR characterp) -(def-atomic-type CLOS:STRUCTURE-OBJECT clos::structure-object-p) -(def-atomic-type SYMBOL symbolp) -(def-atomic-type T (lambda (x) (declare (ignore x)) t)) -;; foreign1.lisp is loaded after this file, -;; so these symbols are not external yet -#+ffi -(def-atomic-type ffi::foreign-function - (lambda (x) (eq 'ffi::foreign-function (type-of x)))) -#+ffi -(def-atomic-type ffi::foreign-variable - (lambda (x) (eq 'ffi::foreign-variable (type-of x)))) -#+ffi -(def-atomic-type ffi::foreign-address - (lambda (x) (eq 'ffi::foreign-address (type-of x)))) -;; see lispbibl.d (#define FOREIGN) and predtype.d (TYPE-OF): -#+(or unix ffi affi win32) -(def-atomic-type foreign-pointer - (lambda (x) (eq 'foreign-pointer (type-of x)))) -(def-atomic-type VECTOR vectorp) -(def-atomic-type PLIST - (lambda (x) (multiple-value-bind (length tail) (list-length-dotted x) - (and (null tail) (evenp length))))) - -(defmacro ensure-dim (type dim) - ;; make sure DIM is a valid dimension - `(unless (or (eq ,dim '*) (typep ,dim `(INTEGER 0 (,ARRAY-DIMENSION-LIMIT)))) - (error (TEXT "~S: dimension ~S is invalid") ',type ,dim))) - -(defmacro ensure-rank (type rank) - ;; make sure RANK is a valid rank - `(unless (typep ,rank `(INTEGER 0 (,ARRAY-RANK-LIMIT))) - (error (TEXT "~S: rank ~S is invalid") ',type ,rank))) - -; CLtL1 p. 
46-50 -(defun c-typep-array (tester el-type dims x) - `(AND (,tester ,x) - ,@(if (eq el-type '*) - '() - `((EQUAL (ARRAY-ELEMENT-TYPE ,x) ',(upgraded-array-element-type el-type))) - ) - ,@(if (eq dims '*) - '() - (if (numberp dims) - `((EQL ,dims (ARRAY-RANK ,x))) - `((EQL ,(length dims) (ARRAY-RANK ,x)) - ,@(let ((i 0)) - (mapcap #'(lambda (dim) - (prog1 - (if (eq dim '*) - '() - `((EQL ',dim (ARRAY-DIMENSION ,x ,i))) - ) - (incf i) - ) ) - dims - ) ) - ) - ) ) - ) -) -(defun c-typep-vector (tester size x) - `(AND (,tester ,x) - ,@(if (eq size '*) - '() - `((EQL ',size (ARRAY-DIMENSION ,x 0))) - ) - ) -) -(defun typep-number-test (x low high test type) - (and (funcall test x) - (cond ((eq low '*)) - ((funcall test low) (<= low x)) - ((and (consp low) (null (rest low)) (funcall test (first low))) - (< (first low) x) - ) - (t (error-of-type 'error - #1=(TEXT "~S: argument to ~S must be *, ~S or a list of ~S: ~S") - 'typep type type type low - ) ) ) - (cond ((eq high '*)) - ((funcall test high) (>= high x)) - ((and (consp high) (null (rest high)) (funcall test (first high))) - (> (first high) x) - ) - (t (error-of-type 'error - #1# 'typep type type type high -) ) ) ) ) -(defun c-typep-number (caller tester low high x) - `(AND (,tester ,x) - ,@(cond ((eq low '*) '()) - ((funcall tester low) `((<= ,low ,x))) - ((and (consp low) (null (rest low)) (funcall tester (first low))) - `((< ,(first low) ,x)) - ) - (t (c-warn #1=(TEXT "~S: argument to ~S must be *, ~S or a list of ~S: ~S") - 'typep caller caller caller low - ) - (throw 'c-TYPEP nil) - ) ) - ,@(cond ((eq high '*) '()) - ((funcall tester high) `((>= ,high ,x))) - ((and (consp high) (null (rest high)) (funcall tester (first high))) - `((> ,(first high) ,x)) - ) - (t (c-warn #1# 'typep caller caller caller high) - (throw 'c-TYPEP nil) - ) ) - ) -) -(def-compound-type ARRAY (&optional (el-type '*) (dims '*)) (x) - (unless (eq dims '*) - (if (numberp dims) - (ensure-rank ARRAY dims) - (dolist (dim dims) (ensure-dim ARRAY dim)))) - (and (arrayp x) - (or (eq el-type '*) - (equal (array-element-type x) (upgraded-array-element-type el-type)) - ) - (or (eq dims '*) - (if (numberp dims) - (eql dims (array-rank x)) - (and (eql (length dims) (array-rank x)) - (every #'(lambda (a b) (or (eq a '*) (eql a b))) - dims (array-dimensions x) - ) ) ) ) ) - (c-typep-array 'ARRAYP el-type dims x) -) -(def-compound-type SIMPLE-ARRAY (&optional (el-type '*) (dims '*)) (x) - (unless (eq dims '*) - (if (numberp dims) - (ensure-rank SIMPLE-ARRAY dims) - (dolist (dim dims) (ensure-dim SIMPLE-ARRAY dim)))) - (and (simple-array-p x) - (or (eq el-type '*) - (equal (array-element-type x) (upgraded-array-element-type el-type)) - ) - (or (eq dims '*) - (if (numberp dims) - (eql dims (array-rank x)) - (and (eql (length dims) (array-rank x)) - (every #'(lambda (a b) (or (eq a '*) (eql a b))) - dims (array-dimensions x) - ) ) ) ) ) - (c-typep-array 'SIMPLE-ARRAY-P el-type dims x) -) -(def-compound-type VECTOR (&optional (el-type '*) (size '*)) (x) - (ensure-dim VECTOR size) - (and (vectorp x) - (or (eq el-type '*) - (equal (array-element-type x) (upgraded-array-element-type el-type)) - ) - (or (eq size '*) (eql (array-dimension x 0) size)) - ) - `(AND (VECTORP ,x) - ,@(if (eq el-type '*) - '() - `((EQUAL (ARRAY-ELEMENT-TYPE ,x) ',(upgraded-array-element-type el-type))) - ) - ,@(if (eq size '*) - '() - `((EQL (ARRAY-DIMENSION ,x 0) ',size)) - ) - ) -) -(def-compound-type SIMPLE-VECTOR (&optional (size '*)) (x) - (ensure-dim SIMLPE-VECTOR size) - (and (simple-vector-p x) - (or 
(eq size '*) (eql size (array-dimension x 0))) - ) - (c-typep-vector 'SIMPLE-VECTOR-P size x) -) -(def-compound-type COMPLEX (&optional (rtype '*) (itype rtype)) (x) - nil - (and (complexp x) - (or (eq rtype '*) - (typep (realpart x) (upgraded-complex-part-type rtype))) - (or (eq itype '*) - (typep (imagpart x) (upgraded-complex-part-type itype)))) - `(AND (COMPLEXP ,x) - ,@(if (eq rtype '*) - '() - `((TYPEP (REALPART ,x) ',(upgraded-complex-part-type rtype)))) - ,@(if (eq itype '*) - '() - `((TYPEP (IMAGPART ,x) ',(upgraded-complex-part-type itype)))))) -(def-compound-type INTEGER (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'integerp 'INTEGER) - (c-typep-number 'INTEGER 'INTEGERP low high x) -) -(def-compound-type MOD (n) (x) - (unless (integerp n) - (error (TEXT "~S: argument to MOD must be an integer: ~S") - 'typep n - ) ) - (and (integerp x) (<= 0 x) (< x n)) - `(AND (INTEGERP ,x) (NOT (MINUSP ,x)) (< ,x ,n)) -) -(def-compound-type SIGNED-BYTE (&optional (n '*)) (x) - (unless (or (eq n '*) (integerp n)) - (error (TEXT "~S: argument to SIGNED-BYTE must be an integer or * : ~S") - 'typep n - ) ) - (and (integerp x) (or (eq n '*) (< (integer-length x) n))) - `(AND (INTEGERP ,x) - ,@(if (eq n '*) '() `((< (INTEGER-LENGTH ,x) ,n))) - ) -) -(def-compound-type UNSIGNED-BYTE (&optional (n '*)) (x) - (unless (or (eq n '*) (integerp n)) - (error (TEXT "~S: argument to UNSIGNED-BYTE must be an integer or * : ~S") - 'typep n - ) ) - (and (integerp x) - (not (minusp x)) - (or (eq n '*) (<= (integer-length x) n)) - ) - `(AND (INTEGERP ,x) (NOT (MINUSP ,x)) - ,@(if (eq n '*) '() `((<= (INTEGER-LENGTH ,x) ,n))) - ) -) -(def-compound-type REAL (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'realp 'REAL) - (c-typep-number 'REAL 'REALP low high x) -) -(def-compound-type RATIONAL (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'rationalp 'RATIONAL) - (c-typep-number 'RATIONAL 'RATIONALP low high x) -) -(def-compound-type FLOAT (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'floatp 'FLOAT) - (c-typep-number 'FLOAT 'FLOATP low high x) -) -(def-compound-type SHORT-FLOAT (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'short-float-p 'SHORT-FLOAT) - (c-typep-number 'SHORT-FLOAT 'SHORT-FLOAT-P low high x) -) -(def-compound-type SINGLE-FLOAT (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'single-float-p 'SINGLE-FLOAT) - (c-typep-number 'SINGLE-FLOAT 'SINGLE-FLOAT-P low high x) -) -(def-compound-type DOUBLE-FLOAT (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'double-float-p 'DOUBLE-FLOAT) - (c-typep-number 'DOUBLE-FLOAT 'DOUBLE-FLOAT-P low high x) -) -(def-compound-type LONG-FLOAT (&optional (low '*) (high '*)) (x) - nil - (typep-number-test x low high #'long-float-p 'LONG-FLOAT) - (c-typep-number 'LONG-FLOAT 'LONG-FLOAT-P low high x) -) -(def-compound-type STRING (&optional (size '*)) (x) - (ensure-dim STRING size) - (and (stringp x) - (or (eq size '*) (eql size (array-dimension x 0))) - ) - (c-typep-vector 'STRINGP size x) -) -(def-compound-type SIMPLE-STRING (&optional (size '*)) (x) - (ensure-dim SIMPLE-STRING size) - (and (simple-string-p x) - (or (eq size '*) (eql size (array-dimension x 0))) - ) - (c-typep-vector 'SIMPLE-STRING-P size x) -) -(def-compound-type BASE-STRING (&optional (size '*)) (x) - (ensure-dim BASE-STRING size) - (and (stringp x) - (or (eq size '*) (eql size (array-dimension x 0))) - ) - 
(c-typep-vector 'STRINGP size x) -) -(def-compound-type SIMPLE-BASE-STRING (&optional (size '*)) (x) - (ensure-dim SIMPLE-BASE-STRING size) - (and (simple-string-p x) - (or (eq size '*) (eql size (array-dimension x 0))) - ) - (c-typep-vector 'SIMPLE-STRING-P size x) -) -(def-compound-type BIT-VECTOR (&optional (size '*)) (x) - (ensure-dim BIT-VECTOR size) - (and (bit-vector-p x) - (or (eq size '*) (eql size (array-dimension x 0))) - ) - (c-typep-vector 'BIT-VECTOR-P size x) -) -(def-compound-type SIMPLE-BIT-VECTOR (&optional (size '*)) (x) - (ensure-dim SIMPLE-BIT-VECTOR size) - (and (simple-bit-vector-p x) - (or (eq size '*) (eql size (array-dimension x 0))) - ) - (c-typep-vector 'SIMPLE-BIT-VECTOR-P size x) -) -(def-compound-type CONS (&optional (car-type '*) (cdr-type '*)) (x) - nil - (and (consp x) - (or (eq car-type '*) (typep (car x) car-type)) - (or (eq cdr-type '*) (typep (cdr x) cdr-type)) - ) - `(AND (CONSP ,x) - ,@(if (eq car-type '*) '() `((TYPEP (CAR ,x) ',car-type))) - ,@(if (eq cdr-type '*) '() `((TYPEP (CDR ,x) ',cdr-type))) - ) -) - -(fmakunbound 'def-compound-type) - -;; ---------------------------------------------------------------------------- - -; Typtest ohne Gefahr einer Fehlermeldung. Für SIGNAL und HANDLER-BIND. -(defun safe-typep (x y &optional env) - (let ((*error-handler* - #'(lambda (&rest error-args) - (declare (ignore error-args)) - (return-from safe-typep (values nil nil)) - )) ) - (values (typep x y env) t) -) ) - -; Umwandlung eines "type for declaration" in einen "type for discrimination". -(defun type-for-discrimination (y &optional (notp nil) &aux f) - (cond ((symbolp y) - (cond ((get y 'TYPE-SYMBOL) y) - ((get y 'TYPE-LIST) y) - ((setq f (get y 'DEFTYPE-EXPANDER)) - (let* ((z (funcall f (list y))) - (zx (type-for-discrimination z notp))) - (if (eql zx z) y zx) - )) - (t y) - ) ) - ((and (consp y) (symbolp (first y))) - (case (first y) - ((SATISFIES MEMBER EQL) y) - (NOT - (let* ((z (second y)) - (zx (type-for-discrimination z (not notp)))) - (if (eql zx z) y `(NOT ,zx)) - )) - ((AND OR COMPLEX VALUES) - (let* ((z (rest y)) - (zx (mapcar #'(lambda (x) (type-for-discrimination x notp)) z))) - (if (every #'eql z zx) y (cons (first y) zx)) - )) - (FUNCTION - ;; (FUNCTION arg-types res-type) is somewhere between - ;; NIL and FUNCTION, but undecidable. - (if notp 'NIL 'FUNCTION) - ) - (t (cond ((get (first y) 'TYPE-LIST) y) - ((setq f (get (first y) 'DEFTYPE-EXPANDER)) - (let* ((z (funcall f y)) - (zx (type-for-discrimination z notp))) - (if (eql zx z) y zx) - )) - (t y) - ) ) ) ) - (t y) -) ) - -; Testet eine Liste von Werten auf Erfüllen eines Type-Specifiers. Für THE. -(defun %the (values type) ; ABI - (macrolet ((near-typep (objform typform) - ;; near-typep ist wie typep, nur dass das Objekt auch ein - ;; Read-Label sein darf. Das tritt z.B. auf bei - ;; (read-from-string "#1=#S(FOO :X #1#)") - ;; im Konstruktor MAKE-FOO. Die Implementation ist aber - ;; nicht gezwungen, bei fehlerhaftem THE zwingend einen - ;; Fehler zu melden, darum ist ein lascherer Typcheck hier - ;; erlaubt. - (let ((g (gensym))) - `(let ((,g ,objform)) - (or (typep ,g ,typform) (eq (type-of ,g) 'READ-LABEL)))))) - (if (and (consp type) (eq (car type) 'VALUES)) - ;; The VALUES type specifier is ill-defined in ANSI CL. - ;; - ;; There are two possibilities to define a VALUES type specifier in a - ;; sane way: - ;; - (EXACT-VALUES type1 ... [&optional ...]) describes the exact shape - ;; of the values list, as received by MULTIPLE-VALUE-LIST. 
- ;; For example, (EXACT-VALUES SYMBOL) is matched by (values 'a) but not - ;; by (values 'a 'b) or (values). - ;; - (ASSIGNABLE-VALUES type1 ... [&optional ...]) describes the values - ;; as received by a set of variables through MULTIPLE-VALUE-BIND or - ;; MULTIPLE-VALUE-SETQ. For example, (ASSIGNABLE-VALUES SYMBOL) is - ;; defined by whether - ;; (MULTIPLE-VALUE-BIND (var1) values (DECLARE (TYPE SYMBOL var1)) ...) - ;; is valid or not; therefore (ASSIGNABLE-VALUES SYMBOL) is matched by - ;; (values 'a) and (values 'a 'b) and (values). - ;; Note that &OPTIONAL is actually redundant here: - ;; (ASSIGNABLE-VALUES type1 ... &optional otype1 ...) - ;; is equivalent to - ;; (ASSIGNABLE-VALUES type1 ... (OR NULL otype1) ...) - ;; HyperSpec/Body/typspe_values.html indicates that VALUES means - ;; EXACT-VALUES; however, HyperSpec/Body/speope_the.html indicates that - ;; VALUES means ASSIGNABLE-VALUES. - ;; - ;; SBCL interprets the VALUES type specifier to mean EXACT-VALUES when - ;; it contains &OPTIONAL or &REST, but ASSIGNABLE-VALUES when it has - ;; only a tuple of type specifiers. This is utter nonsense, in particular - ;; because it makes (VALUES type1 ... typek &OPTIONAL) - ;; different from (VALUES type1 ... typek). - ;; - ;; Here we use the ASSIGNABLE-VALUES interpretation. - ;; In SUBTYPEP we just punt and don't assume any interpretation. - (let ((vals values) (types (cdr type))) - ;; required: - (loop - (when (or (atom types) (atom vals)) (return-from %the t)) - (when (memq (car types) lambda-list-keywords) (return)) - (unless (near-typep (pop vals) (pop types)) - (return-from %the nil))) - ;; &optional: - (when (and (consp types) (eq (car types) '&optional)) - (setq types (cdr types)) - (loop - (when (or (atom types) (atom vals)) (return-from %the t)) - (when (memq (car types) lambda-list-keywords) (return)) - (unless (near-typep (pop vals) (pop types)) - (return-from %the nil)))) - ;; &rest &key: - (case (car types) - (&rest - (setq types (cdr types)) - (when (atom types) (typespec-error 'the type)) - (unless (near-typep (pop vals) (pop types)) - (return-from %the nil))) - (&key) - (t (typespec-error 'the type))) - (if (eq (car types) '&key) - (progn - (setq types (cdr types)) - (when (oddp (length vals)) (return-from %the nil)) - (let ((keywords nil)) - (loop - (when (or (atom types) (atom vals)) (return-from %the t)) - (when (memq (car types) lambda-list-keywords) (return)) - (let ((item (pop types))) - (unless (and (listp item) (eql (length item) 2) - (symbolp (first item))) - (typespec-error 'the type)) - (let ((kw (symbol-to-keyword (first item)))) - (unless (near-typep (getf vals kw) (second item)) - (return-from %the nil)) - (push kw keywords)))) - (if (and (consp types) (eq (car types) '&allow-other-keys)) - (setq types (cdr types)) - (unless (getf vals ':allow-other-keys) - (do ((L vals (cddr L))) - ((atom L)) - (unless (memq (car L) keywords) - (return-from %the nil))))))) - (when (consp types) (typespec-error 'the type))) - t) - (near-typep (if (consp values) (car values) nil) type)))) - -;;; =========================================================================== - -;; SUBTYPEP -(load "subtypep") - - -;; Returns the number of bytes that are needed to represent #\Null in a -;; given encoding. 
-(defun encoding-zeroes (encoding) - #+UNICODE - ;; this should use min_bytes_per_char for cache, not the hash table - (let ((name (ext:encoding-charset encoding)) - (table #.(make-hash-table :key-type '(or string symbol) :value-type 'fixnum - :test 'stablehash-equal :warn-if-needs-rehash-after-gc t - :initial-contents '(("UTF-7" . 1)))) - (tester #.(make-string 2 :initial-element (code-char 0)))) - (or (gethash name table) - (setf (gethash name table) - (- (length (ext:convert-string-to-bytes tester encoding)) - (length (ext:convert-string-to-bytes tester encoding - :end 1)))))) - #-UNICODE 1) - -;; Determines two values low,high such that -;; (subtypep type `(INTEGER ,low ,high)) -;; holds and low is as large as possible and high is as small as possible. -;; low = * means -infinity, high = * means infinity. -;; When (subtypep type 'INTEGER) is false, the values NIL,NIL are returned. -;; We need this function only for MAKE-ARRAY, UPGRADED-ARRAY-ELEMENT-TYPE and -;; OPEN and can therefore w.l.o.g. replace -;; type with `(OR ,type (MEMBER 0)) -#| ;; The original implementation calls canonicalize-type and then applies - ;; a particular SUBTYPE variant: - (defun subtype-integer (type) - (macrolet ((yes () '(return-from subtype-integer (values low high))) - (no () '(return-from subtype-integer nil)) - (unknown () '(return-from subtype-integer nil))) - (setq type (canonicalize-type type)) - (if (consp type) - (case (first type) - (MEMBER ; (MEMBER &rest objects) - ;; All elements must be of type INTEGER. - (let ((low 0) (high 0)) ; wlog! - (dolist (x (rest type) (yes)) - (unless (typep x 'INTEGER) (return (no))) - (setq low (min low x) high (max high x))))) - (OR ; (OR type*) - ;; Every type must be subtype of INTEGER. - (let ((low 0) (high 0)) ; wlog! - (dolist (type1 (rest type) (yes)) - (multiple-value-bind (low1 high1) (subtype-integer type1) - (unless low1 (return (no))) - (setq low (if (or (eq low '*) (eq low1 '*)) '* (min low low1)) - high (if (or (eq high '*) (eq high1 '*)) - '* (max high high1))))))) - (AND ; (AND type*) - ;; If one of the types is subtype of INTEGER, then yes, - ;; otherwise unknown. - (let ((low nil) (high nil)) - (dolist (type1 (rest type)) - (multiple-value-bind (low1 high1) (subtype-integer type1) - (when low1 - (if low - (setq low (if (eq low '*) low1 (if (eq low1 '*) low (max low low1))) - high (if (eq high '*) high1 (if (eq high1 '*) high (min high high1)))) - (setq low low1 high high1))))) - (if low - (progn - (when (and (numberp low) (numberp high) (not (<= low high))) - (setq low 0 high 0) ; type equivalent to NIL) - (yes)) - (unknown))))) - (setq type (list type))) - (if (eq (first type) 'INTEGER) - (let ((low (if (rest type) (second type) '*)) - (high (if (cddr type) (third type) '*))) - (when (consp low) - (setq low (first low)) - (when (numberp low) (incf low))) - (when (consp high) - (setq high (first high)) - (when (numberp high) (decf high))) - (when (and (numberp low) (numberp high) (not (<= low high))) ; type leer? - (setq low 0 high 0)) - (yes)) - (if (and (eq (first type) 'INTERVALS) (eq (second type) 'INTEGER)) - (let ((low (third type)) - (high (car (last type)))) - (when (consp low) - (setq low (first low)) - (when (numberp low) (incf low))) - (when (consp high) - (setq high (first high)) - (when (numberp high) (decf high))) - (yes)) - (unknown))))) -|# ;; This implementation inlines the (tail-recursive) canonicalize-type - ;; function. Its advantage is that it doesn't cons as much. 
- ;; (For example, (subtype-integer '(UNSIGNED-BYTE 8)) doesn't cons.) -(defun subtype-integer (type) - (macrolet ((yes () '(return-from subtype-integer (values low high))) - (no () '(return-from subtype-integer nil)) - (unknown () '(return-from subtype-integer nil))) - (setq type (expand-deftype type)) - (cond ((symbolp type) - (case type - (BIT (let ((low 0) (high 1)) (yes))) - (FIXNUM - (let ((low '#,most-negative-fixnum) - (high '#,most-positive-fixnum)) - (yes))) - ((INTEGER BIGNUM SIGNED-BYTE) - (let ((low '*) (high '*)) (yes))) - (UNSIGNED-BYTE - (let ((low 0) (high '*)) (yes))) - ((NIL) - (let ((low 0) (high 0)) (yes))) ; wlog! - (t (no)))) - ((and (consp type) (symbolp (first type))) - (unless (and (list-length type) (null (cdr (last type)))) - (typespec-error 'subtypep type)) - (case (first type) - (MEMBER ; (MEMBER &rest objects) - ;; All elements must be of type INTEGER. - (let ((low 0) (high 0)) ; wlog! - (dolist (x (rest type) (yes)) - (unless (typep x 'INTEGER) (return (no))) - (setq low (min low x) high (max high x))))) - (EQL ; (EQL object) - (let ((x (second type))) - (if (typep x 'INTEGER) - (let ((low (min 0 x)) (high (max 0 x))) (yes)) - (no)))) - (OR ; (OR type*) - ;; Every type must be subtype of INTEGER. - (let ((low 0) (high 0)) ; wlog! - (dolist (type1 (rest type) (yes)) - (multiple-value-bind (low1 high1) (subtype-integer type1) - (unless low1 (return (no))) - (setq low (if (or (eq low '*) (eq low1 '*)) - '* (min low low1)) - high (if (or (eq high '*) (eq high1 '*)) - '* (max high high1))))))) - (AND ; (AND type*) - ;; If one of the types is subtype of INTEGER, then yes, - ;; otherwise unknown. - (let ((low nil) (high nil)) - (dolist (type1 (rest type)) - (multiple-value-bind (low1 high1) (subtype-integer type1) - (when low1 - (if low - (setq low (if (eq low '*) low1 - (if (eq low1 '*) low - (max low low1))) - high (if (eq high '*) high1 - (if (eq high1 '*) high - (min high high1)))) - (setq low low1 - high high1))))) - (if low - (progn - (when (and (numberp low) (numberp high) - (not (<= low high))) - (setq low 0 high 0)) ; type equivalent to NIL - (yes)) - (unknown)))) - (INTEGER - (let ((low (if (rest type) (second type) '*)) - (high (if (cddr type) (third type) '*))) - (when (consp low) - (setq low (first low)) - (when (numberp low) (incf low))) - (when (consp high) - (setq high (first high)) - (when (numberp high) (decf high))) - (when (and (numberp low) (numberp high) (not (<= low high))) - (setq low 0 high 0)) ; type equivalent to NIL - (yes))) - (INTERVALS - (if (eq (second type) 'INTEGER) - (let ((low (third type)) - (high (car (last type)))) - (when (consp low) - (setq low (first low)) - (when (numberp low) (incf low))) - (when (consp high) - (setq high (first high)) - (when (numberp high) (decf high))) - (yes)) - (unknown))) - (MOD ; (MOD n) - (let ((n (second type))) - (unless (and (integerp n) (>= n 0)) - (typespec-error 'subtypep type)) - (if (eql n 0) - (no) - (let ((low 0) (high (1- n))) - (yes))))) - (SIGNED-BYTE ; (SIGNED-BYTE &optional s) - (let ((s (if (cdr type) (second type) '*))) - (if (eq s '*) - (let ((low '*) (high '*)) (yes)) - (progn - (unless (and (integerp s) (plusp s)) - (typespec-error 'subtypep type)) - (let ((n (ash 1 (1- s)))) ; (ash 1 *) == (expt 2 *) - (let ((low (- n)) (high (1- n))) - (yes))))))) - (UNSIGNED-BYTE ; (UNSIGNED-BYTE &optional s) - (let ((s (if (cdr type) (second type) '*))) - (if (eq s '*) - (let ((low 0) (high '*)) (yes)) - (progn - (unless (and (integerp s) (>= s 0)) - (typespec-error 'subtypep type)) - (let 
((n (ash 1 s))) ; (ash 1 *) == (expt 2 *) - (let ((low 0) (high (1- n))) - (yes))))))) - (t (no)))) - ((clos::defined-class-p type) - (if (and (clos::built-in-class-p type) - (eq (get (clos:class-name type) 'CLOS::CLOSCLASS) type)) - (return-from subtype-integer - (subtype-integer (clos:class-name type))) - (no))) - ((clos::eql-specializer-p type) - (let ((x (clos::eql-specializer-singleton type))) - (if (typep x 'INTEGER) - (let ((low (min 0 x)) (high (max 0 x))) (yes)) - (no)))) - ((encodingp type) (no)) - (t (typespec-error 'subtypep type))))) - -#| TODO: Fix subtype-integer such that this works. -Henry Baker: - (defun type-null (x) - (values (and (eq 'bit (upgraded-array-element-type `(or bit ,x))) - (not (typep 0 x)) - (not (typep 1 x))) - t)) - (type-null '(and symbol number)) - (type-null '(and integer symbol)) - (type-null '(and integer character)) -|# - -;; Determines a sequence kind (an atom, as defined in defseq.lisp: one of -;; LIST - stands for LIST -;; VECTOR - stands for (VECTOR T) -;; STRING - stands for (VECTOR CHARACTER) -;; 1, 2, 4, 8, 16, 32 - stands for (VECTOR (UNSIGNED-BYTE n)) -;; 0 - stands for (VECTOR NIL)) -;; that indicates the sequence type meant by the given type. Other possible -;; return values are -;; SEQUENCE - denoting a type whose intersection with (OR LIST VECTOR) is not -;; subtype of LIST or VECTOR, or -;; NIL - indicating a type whose intersection with (OR LIST VECTOR) is empty. -;; When the type is (OR (VECTOR eltype1) ... (VECTOR eltypeN)), the chosen -;; element type is the smallest element type that contains all of eltype1 ... -;; eltypeN. -;; -;; User-defined sequence types are not supported here. -;; -;; This implementation inlines the (tail-recursive) canonicalize-type -;; function. Its advantage is that it doesn't cons as much. Also it employs -;; some heuristics and does not have the full power of SUBTYPEP. 
-(defun subtype-sequence (type) - (setq type (expand-deftype type)) - (cond ((symbolp type) - (case type - ((LIST CONS NULL) 'LIST) - ((NIL) 'NIL) - ((BIT-VECTOR SIMPLE-BIT-VECTOR) '1) - ((STRING SIMPLE-STRING BASE-STRING SIMPLE-BASE-STRING) 'STRING) - ((VECTOR SIMPLE-VECTOR ARRAY SIMPLE-ARRAY) 'VECTOR) - ((SEQUENCE) 'SEQUENCE) - (t 'NIL))) - ((and (consp type) (symbolp (first type))) - (unless (and (list-length type) (null (cdr (last type)))) - (typespec-error 'subtypep type)) - (case (first type) - (MEMBER ; (MEMBER &rest objects) - (let ((kind 'NIL)) - (dolist (x (rest type)) - (setq kind (sequence-type-union kind (type-of-sequence x)))) - kind)) - (EQL ; (EQL object) - (unless (eql (length type) 2) - (typespec-error 'subtypep type)) - (type-of-sequence (second type))) - (OR ; (OR type*) - (let ((kind 'NIL)) - (dolist (x (rest type)) - (setq kind (sequence-type-union kind (subtype-sequence x)))) - kind)) - (AND ; (AND type*) - (let ((kind 'SEQUENCE)) - (dolist (x (rest type)) - (setq kind (sequence-type-intersection kind (subtype-sequence x)))) - kind)) - ((SIMPLE-BIT-VECTOR BIT-VECTOR) ; (SIMPLE-BIT-VECTOR &optional size) - (when (cddr type) - (typespec-error 'subtypep type)) - '1) - ((SIMPLE-STRING STRING SIMPLE-BASE-STRING BASE-STRING) ; (SIMPLE-STRING &optional size) - (when (cddr type) - (typespec-error 'subtypep type)) - 'STRING) - (SIMPLE-VECTOR ; (SIMPLE-VECTOR &optional size) - (when (cddr type) - (typespec-error 'subtypep type)) - 'VECTOR) - ((VECTOR ARRAY SIMPLE-ARRAY) ; (VECTOR &optional el-type size), (ARRAY &optional el-type dimensions) - (when (cdddr type) - (typespec-error 'subtypep type)) - (let ((el-type (if (cdr type) (second type) '*))) - (if (eq el-type '*) - 'VECTOR - (let ((eltype (upgraded-array-element-type el-type))) - (cond ((eq eltype 'T) 'VECTOR) - ((eq eltype 'CHARACTER) 'STRING) - ((eq eltype 'BIT) '1) - ((and (consp eltype) (eq (first eltype) 'UNSIGNED-BYTE)) (second eltype)) - ((eq eltype 'NIL) '0) - (t (error (TEXT "~S is not up-to-date with ~S for element type ~S") - 'subtypep-sequence 'upgraded-array-element-type eltype))))))) - ((CONS) ; (CONS &optional cartype cdrtype) - (when (cdddr type) - (typespec-error 'subtypep type)) - 'LIST) - (t 'NIL))) - ((clos::defined-class-p type) - (if (and (clos::built-in-class-p type) - (eq (get (clos:class-name type) 'CLOS::CLOSCLASS) type)) - (subtype-sequence (clos:class-name type)) - 'NIL)) - ((clos::eql-specializer-p type) - (type-of-sequence (clos::eql-specializer-singleton type))) - (t 'NIL))) -(defun type-of-sequence (x) - (cond ((listp x) 'LIST) - ((vectorp x) - (let ((eltype (array-element-type x))) - (cond ((eq eltype 'T) 'VECTOR) - ((eq eltype 'CHARACTER) 'STRING) - ((eq eltype 'BIT) '1) - ((and (consp eltype) (eq (first eltype) 'UNSIGNED-BYTE)) (second eltype)) - ((eq eltype 'NIL) '0) - (t (error (TEXT "~S is not up-to-date with ~S for element type ~S") - 'type-of-sequence 'array-element-type eltype))))) - (t 'NIL))) -(defun sequence-type-union (t1 t2) - (cond ; Simple general rules. - ((eql t1 t2) t1) - ((eq t1 'NIL) t2) - ((eq t2 'NIL) t1) - ; Now the union of two different types. - ((or (eq t1 'SEQUENCE) (eq t2 'SEQUENCE)) 'SEQUENCE) - ((or (eq t1 'LIST) (eq t2 'LIST)) - ; union of LIST and a vector type - 'SEQUENCE) - ((or (eq t1 'VECTOR) (eq t2 'VECTOR)) 'VECTOR) - ((eql t1 0) t2) - ((eql t2 0) t1) - ((or (eq t1 'STRING) (eq t2 'STRING)) - ; union of STRING and an integer-vector type - 'VECTOR) - (t (max t1 t2)))) -(defun sequence-type-intersection (t1 t2) - (cond ; Simple general rules. 
- ((eql t1 t2) t1) - ((or (eq t1 'NIL) (eq t2 'NIL)) 'NIL) - ; Now the intersection of two different types. - ((eq t1 'SEQUENCE) t2) - ((eq t2 'SEQUENCE) t1) - ((or (eq t1 'LIST) (eq t2 'LIST)) - ; intersection of LIST and a vector type - 'NIL) - ((eq t1 'VECTOR) t2) - ((eq t2 'VECTOR) t1) - ((or (eql t1 0) (eql t2 0)) '0) - ((or (eq t1 'STRING) (eq t2 'STRING)) - ; intersection of STRING and an integer-vector type - '0) - (t (min t1 t2)))) - -;; ============================================================================ - -(defun type-expand (typespec &optional once-p) - (multiple-value-bind (expanded user-defined-p) - (expand-deftype typespec once-p) - (if user-defined-p (values expanded user-defined-p) - (cond ((symbolp typespec) - (cond ((or (get typespec 'TYPE-SYMBOL) (get typespec 'TYPE-LIST)) - (values typespec nil)) - ((or (get typespec 'DEFSTRUCT-DESCRIPTION) - (clos-class typespec)) - (values typespec nil)) - (t (typespec-error 'type-expand typespec)))) - ((and (consp typespec) (symbolp (first typespec))) - (case (first typespec) - ((SATISFIES MEMBER EQL NOT AND OR) (values typespec nil)) - (t (cond ((get (first typespec) 'TYPE-LIST) - (values typespec nil)) - (t (typespec-error 'type-expand typespec)))))) - ((clos::defined-class-p typespec) (values typespec nil)) - (t (typespec-error 'type-expand typespec)))))) - -;; ============================================================================ - -(unless (clos::funcallable-instance-p #'clos::class-name) - (fmakunbound 'clos::class-name)) - - -(keywordp :junk) - T - -(keywordp ::junk) - T - -(symbol-name ::junk) - "JUNK" - -(symbol-name :#junk) - "#JUNK" - -(symbol-name :#.junk) - "#.JUNK" diff --git a/tests/examplefiles/typescript_example b/tests/examplefiles/typescript_example deleted file mode 100644 index 760e2543..00000000 --- a/tests/examplefiles/typescript_example +++ /dev/null @@ -1,39 +0,0 @@ -class Animal { - constructor(public name) { } - move(meters) { - alert(this.name + " moved " + meters + "m."); - } -} - -class Snake extends Animal { - constructor(name) { super(name); } - move() { - alert("Slithering..."); - super.move(5); - } -} - -class Horse extends Animal { - constructor(name) { super(name); } - move() { - alert("Galloping..."); - super.move(45); - } -} - -@View({ - templateUrl: "app/components/LoginForm.html", - directives: [FORM_DIRECTIVES, NgIf] -}) -@Component({ - selector: "login-form" -}) -class LoginForm { - -} - -var sam = new Snake("Sammy the Python") -var tom: Animal = new Horse("Tommy the Palomino") - -sam.move() -tom.move(34) diff --git a/tests/examplefiles/typoscript_example b/tests/examplefiles/typoscript_example deleted file mode 100644 index e2fccf5d..00000000 --- a/tests/examplefiles/typoscript_example +++ /dev/null @@ -1,1930 +0,0 @@ -# *************************************************************************** -# Notice: "styles." (and "temp.") objects are UNSET after template parsing! -# Use "lib." for persisting storage of objects. -# *************************************************************************** - - - -page.80 = RECORDS -page.80 { - source = 1 - tables = tt_address - conf.tt_address = COA - conf.tt_address { - 20 = TEXT - 20.field = email - 20.typolink.parameter.field = email - } -} - - /* -page.200 = PHP_SCRIPT_EXT -page.200 { - 1 = TMENU - 1.wrap =
    |
    - 1.expAll = 1 - 1.submenuObjSuffixes = a |*| |*| b - 1.NO.allWrap = |
    - - 2 = TMENU - 2.NO.allWrap =
    |
    - - 2a = TMENU - 2a.NO.allWrap =
    |
    -* - 2b = TMENU - 2b.NO.allWrap =
    |
    -} -*/ - - # Add the CSS and JS files -page { - includeCSS { # comment at the end of a line - file99 = fileadmin/your-fancybox.css - } - includeJSFooter { - fancybox = fileadmin/your-fancybox.js - } -} - - # Change the default rendering of images to match lightbox requirements -tt_content.image.20.1.imageLinkWrap { - JSwindow = 0 - test = MyExtension\Path\To\Class - - directImageLink = 1 - linkParams.ATagParams { - dataWrap = class= "lightbox" rel="fancybox{field:uid}" - } -} - -tt_content.image.20.1.imageLinkWrap > -tt_content.image.20.1.imageLinkWrap = 1 -tt_content.image.20.1.imageLinkWrap { - enable = 1 - typolink { - # directly link to the recent image - parameter.cObject = IMG_RESOURCE - parameter.cObject.file.import.data = TSFE:lastImageInfo|origFile - parameter.cObject.file.maxW = {$styles.content.imgtext.maxW} - parameter.override.listNum.stdWrap.data = register : IMAGE_NUM_CURRENT - title.field = imagecaption // title - title.split.token.char = 10 - title.if.isTrue.field = imagecaption // header - title.split.token.char = 10 - title.split.returnKey.data = register : IMAGE_NUM_CURRENT - parameter.cObject = IMG_RESOURCE - parameter.cObject.file.import.data = TSFE:lastImageInfo|origFile - ATagParams = target="_blank" - } -} - -10 = IMAGE -10 { - # point to the image - file = fileadmin/demo/lorem_ipsum/images/a4.jpg - # make it rather small - file.width = 80 - # add a link to tx_cms_showpic.php that shows the original image - imageLinkWrap = 1 - imageLinkWrap { - enable = 1 - # JSwindow = 1 - } -} - -# Clear out any constants in this reserved room! -styles.content > - -# get content -styles.content.get = CONTENT -styles.content.get { - table = tt_content - select.orderBy = sorting - select.where = colPos=0 - select.languageField = sys_language_uid -} - -# get content, left -styles.content.getLeft < styles.content.get -styles.content.getLeft.select.where = colPos=1 - -# get content, right -styles.content.getRight < styles.content.get -styles.content.getRight.select.where = colPos=2 - -# get content, margin -styles.content.getBorder < styles.content.get -styles.content.getBorder.select.where = colPos=3 - -# get news -styles.content.getNews < styles.content.get -styles.content.getNews.select.pidInList = {$styles.content.getNews.newsPid} - -# Edit page object: -styles.content.editPanelPage = COA -styles.content.editPanelPage { - 10 = EDITPANEL - 10 { - allow = toolbar,move,hide - label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.page - label.wrap = | %s - } -} - - - - - - - - - - - -# ********************************************************************* -# "lib." objects are preserved from unsetting after template parsing -# ********************************************************************* - -# Creates persistent ParseFunc setup for non-HTML content. This is recommended to use (as a reference!) 
-lib.parseFunc { - makelinks = 1 - makelinks.http.keep = {$styles.content.links.keep} - makelinks.http.extTarget = {$styles.content.links.extTarget} - makelinks.mailto.keep = path - tags { - link = TEXT - link { - current = 1 - typolink.parameter.data = parameters : allParams - typolink.extTarget = {$styles.content.links.extTarget} - typolink.target = {$styles.content.links.target} - parseFunc.constants =1 - } - } - allowTags = {$styles.content.links.allowTags} - denyTags = * - sword = | - constants = 1 - - nonTypoTagStdWrap.HTMLparser = 1 - nonTypoTagStdWrap.HTMLparser { - keepNonMatchedTags = 1 - htmlSpecialChars = 2 - } -} - -# good old parsefunc in "styles.content.parseFunc" is created for backwards compatibility. Don't use it, just ignore. -styles.content.parseFunc < lib.parseFunc - -# Creates persistent ParseFunc setup for RTE content (which is mainly HTML) based on the "ts_css" transformation. -lib.parseFunc_RTE < lib.parseFunc -lib.parseFunc_RTE { - // makelinks > - # Processing and
    blocks separately - externalBlocks = table, blockquote, dd, dl, ol, ul, div - externalBlocks { - # The blockquote content is passed into parseFunc again... - blockquote.stripNL=1 - blockquote.callRecursive=1 - blockquote.callRecursive.tagStdWrap.HTMLparser = 1 - blockquote.callRecursive.tagStdWrap.HTMLparser.tags.blockquote.overrideAttribs = style="margin-bottom:0;margin-top:0;" - - ol.stripNL=1 - ol.stdWrap.parseFunc = < lib.parseFunc - - ul.stripNL=1 - ul.stdWrap.parseFunc = < lib.parseFunc - - table.stripNL=1 - table.stdWrap.HTMLparser = 1 - table.stdWrap.HTMLparser.tags.table.fixAttrib.class { - default = contenttable - always = 1 - list = contenttable - } - table.stdWrap.HTMLparser.keepNonMatchedTags = 1 - table.HTMLtableCells=1 - table.HTMLtableCells { - default.callRecursive=1 - addChr10BetweenParagraphs=1 - } - div.stripNL = 1 - div.callRecursive = 1 - - # Definition list processing - dl < .div - dd < .div - } - nonTypoTagStdWrap.encapsLines { - encapsTagList = p,pre,h1,h2,h3,h4,h5,h6,hr,dt - remapTag.DIV = P - nonWrappedTag = P - innerStdWrap_all.ifBlank =   - addAttributes.P.class = bodytext - addAttributes.P.class.setOnly=blank - } - nonTypoTagStdWrap.HTMLparser = 1 - nonTypoTagStdWrap.HTMLparser { - keepNonMatchedTags = 1 - htmlSpecialChars = 2 - } -} - - -# Content header: -lib.stdheader = COA -lib.stdheader { - - # Create align style-attribute for tags - 2 = LOAD_REGISTER - 2.headerStyle.field = header_position - 2.headerStyle.required = 1 - 2.headerStyle.noTrimWrap = | style="text-align:|;"| - - # Create class="csc-firstHeader" attribute for tags - 3 = LOAD_REGISTER - 3.headerClass = csc-firstHeader - 3.headerClass.if.value=1 - 3.headerClass.if.equals.data = cObj:parentRecordNumber - 3.headerClass.noTrimWrap = | class="|"| - - # Date format: - 5 = TEXT - 5.field = date - 5.if.isTrue.field = date - 5.strftime = %x - 5.wrap =

    |

    - 5.prefixComment = 2 | Header date: - - # This CASE cObject renders the header content: - # currentValue is set to the header data, possibly wrapped in link-tags. - 10 = CASE - 10.setCurrent { - field = header - htmlSpecialChars = 1 - typolink.parameter.field = header_link - } - 10.key.field = header_layout - 10.key.ifEmpty = {$content.defaultHeaderType} - 10.key.ifEmpty.override.data = register: defaultHeaderType - - 10.1 = TEXT - 10.1.current = 1 - 10.1.dataWrap = | - - 10.2 < .10.1 - 10.2.dataWrap = | - - 10.3 < .10.1 - 10.3.dataWrap = | - - 10.4 < .10.1 - 10.4.dataWrap = | - - 10.5 < .10.1 - 10.5.dataWrap = | - - # Pops the used registers off the stack: - 98 = RESTORE_REGISTER - 99 = RESTORE_REGISTER - - # Post-processing: - stdWrap.fieldRequired = header - stdWrap.if { - equals.field = header_layout - value = 100 - negate = 1 - } - - stdWrap.editIcons = tt_content : header, [header_layout | header_position], [header_link|date] - stdWrap.editIcons.beforeLastTag = 1 - stdWrap.editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.header - - stdWrap.dataWrap =
    |
    - stdWrap.prefixComment = 2 | Header: -} - - - - - - - - - - - - - - - -#****************************************************** -# Including library for processing of some elements: -#****************************************************** -includeLibs.tx_cssstyledcontent_pi1 = EXT:css_styled_content/pi1/class.tx_cssstyledcontent_pi1.php - - -#********************************** -# tt_content is started -#********************************** -tt_content > -tt_content = CASE -tt_content.key.field = CType -tt_content.stdWrap { - innerWrap.cObject = CASE - innerWrap.cObject { - key.field = section_frame - - default = COA - default { - 10 = TEXT - 10 { - value =
    |
    - } - - 1 =< tt_content.stdWrap.innerWrap.cObject.default - 1.15.value = csc-frame csc-frame-invisible - - 5 =< tt_content.stdWrap.innerWrap.cObject.default - 5.15.value = csc-frame csc-frame-rulerBefore - - 6 =< tt_content.stdWrap.innerWrap.cObject.default - 6.15.value = csc-frame csc-frame-rulerAfter - - 10 =< tt_content.stdWrap.innerWrap.cObject.default - 10.15.value = csc-frame csc-frame-indent - - 11 =< tt_content.stdWrap.innerWrap.cObject.default - 11.15.value = csc-frame csc-frame-indent3366 - - 12 =< tt_content.stdWrap.innerWrap.cObject.default - 12.15.value = csc-frame csc-frame-indent6633 - - 20 =< tt_content.stdWrap.innerWrap.cObject.default - 20.15.value = csc-frame csc-frame-frame1 - - 21 =< tt_content.stdWrap.innerWrap.cObject.default - 21.15.value = csc-frame csc-frame-frame2 - - 66 = COA - 66 { - 10 = TEXT - 10 { - value = - insertData = 1 - } - - 20 = COA - 20 { - 10 = TEXT - 10 { - value = {$content.spaceBefore} - wrap = |+ - if.isTrue = {$content.spaceBefore} - } - - 20 = TEXT - 20 { - field = spaceBefore - } - - stdWrap { - prioriCalc = intval - wrap = margin-top:|px; - required = 1 - ifEmpty.value = - wrap2 =
    - } - } - - 30 = TEXT - 30 { - value = | - } - - 40 < .20 - 40 { - 10 { - value = {$content.spaceAfter} - if.isTrue = {$content.spaceAfter} - } - 20.field = spaceAfter - stdWrap.wrap = margin-bottom:|px; - } - } - - } - - innerWrap2 = |

    {LLL:EXT:css_styled_content/pi1/locallang.xml:label.toTop}

    - innerWrap2.insertData = 1 - innerWrap2.fieldRequired = linkToTop - - prepend = TEXT - prepend.dataWrap = - prepend.if.isTrue.field = _LOCALIZED_UID - - editPanel = 1 - editPanel { - allow = move,new,edit,hide,delete - line = 5 - label = %s - onlyCurrentPid = 1 - previewBorder = 4 - edit.displayRecord = 1 - } - - prefixComment = 1 | CONTENT ELEMENT, uid:{field:uid}/{field:CType} -} - - - -# ***************** -# CType: header -# ***************** -# See Object path "lib.stdheader" -tt_content.header = COA -tt_content.header { - 10 = < lib.stdheader - - 20 = TEXT - 20 { - field = subheader - required = 1 - - dataWrap =

    |

    - htmlSpecialChars = 1 - - editIcons = tt_content:subheader,layout - editIcons.beforeLastTag = 1 - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.subheader - - prefixComment = 2 | Subheader: - } -} - - - -# ***************** -# CType: text -# ***************** -tt_content.text = COA -tt_content.text { - 10 = < lib.stdheader - - 20 = TEXT - 20 { - field = bodytext - required = 1 - - parseFunc = < lib.parseFunc_RTE - - editIcons = tt_content:bodytext, rte_enabled - editIcons.beforeLastTag = 1 - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.bodytext - - prefixComment = 2 | Text: - } -} - - - -# ***************** -# CType: image -# ***************** -# (also used for rendering 'textpic' type): -tt_content.image = COA -tt_content.image.10 = < lib.stdheader -tt_content.image.20 = USER -tt_content.image.20 { - userFunc = tx_cssstyledcontent_pi1->render_textpic - - # Image source - imgList.field = image - imgPath = uploads/pics/ - - # Single image rendering - imgObjNum = 1 - 1 { - file.import.current = 1 - file.width.field = imagewidth - imageLinkWrap = 1 - imageLinkWrap { - bodyTag = - wrap = | - width = {$styles.content.imgtext.linkWrap.width} - height = {$styles.content.imgtext.linkWrap.height} - effects = {$styles.content.imgtext.linkWrap.effects} - - JSwindow = 1 - JSwindow.newWindow = {$styles.content.imgtext.linkWrap.newWindow} - JSwindow.if.isFalse = {$styles.content.imgtext.linkWrap.lightboxEnabled} - - directImageLink = {$styles.content.imgtext.linkWrap.lightboxEnabled} - - enable.field = image_zoom - enable.ifEmpty.typolink.parameter.field = image_link - enable.ifEmpty.typolink.parameter.listNum.splitChar = 10 - enable.ifEmpty.typolink.parameter.listNum.stdWrap.data = register : IMAGE_NUM_CURRENT - enable.ifEmpty.typolink.returnLast = url - - typolink.parameter.field = image_link - typolink.parameter.listNum.splitChar = 10 - typolink.parameter.listNum.stdWrap.data = register : IMAGE_NUM_CURRENT - typolink.target = {$styles.content.links.target} - typolink.extTarget = {$styles.content.links.extTarget} - - linkParams.ATagParams.dataWrap = class="{$styles.content.imgtext.linkWrap.lightboxCssClass}" rel="{$styles.content.imgtext.linkWrap.lightboxRelAttribute}" - } - - altText = TEXT - altText { - field = altText - stripHtml = 1 - split.token.char = 10 - split.token.if.isTrue = {$styles.content.imgtext.imageTextSplit} - split.returnKey.data = register : IMAGE_NUM_CURRENT - } - - titleText < .altText - titleText.field = titleText - - longdescURL < .altText - longdescURL.field = longdescURL - - emptyTitleHandling = {$styles.content.imgtext.emptyTitleHandling} - titleInLink = {$styles.content.imgtext.titleInLink} - titleInLinkAndImg = {$styles.content.imgtext.titleInLinkAndImg} - } - - textPos.field = imageorient - maxW = {$styles.content.imgtext.maxW} - maxW.override.data = register:maxImageWidth - maxWInText = {$styles.content.imgtext.maxWInText} - maxWInText.override.data = register:maxImageWidthInText - - equalH.field = imageheight - - image_compression.field = image_compression - image_effects.field = image_effects - - noRows.field = image_noRows - - cols.field = imagecols - border.field = imageborder - - caption { - 1 = TEXT - 1 { - field = imagecaption - required = 1 - parseFunc =< lib.parseFunc - br = 1 - split.token.char = 10 - split.token.if.isPositive = {$styles.content.imgtext.imageTextSplit} + {$styles.content.imgtext.captionSplit} - split.returnKey.data = register : IMAGE_NUM_CURRENT - } - } - # captionSplit is 
deprecated, use imageTextSplit instead - captionSplit = {$styles.content.imgtext.captionSplit} - captionAlign.field = imagecaption_position - # caption/alttext/title/longdescURL splitting - imageTextSplit = {$styles.content.imgtext.imageTextSplit} - - borderCol = {$styles.content.imgtext.borderColor} - borderThick = {$styles.content.imgtext.borderThick} - borderClass = {$styles.content.imgtext.borderClass} - colSpace = {$styles.content.imgtext.colSpace} - rowSpace = {$styles.content.imgtext.rowSpace} - textMargin = {$styles.content.imgtext.textMargin} - - borderSpace = {$styles.content.imgtext.borderSpace} - separateRows = {$styles.content.imgtext.separateRows} - addClasses = - addClassesImage = - addClassesImage.ifEmpty = csc-textpic-firstcol csc-textpic-lastcol - addClassesImage.override = csc-textpic-firstcol |*| |*| csc-textpic-lastcol - addClassesImage.override.if { - isGreaterThan.field = imagecols - value = 1 - } - - # - imageStdWrap.dataWrap =
    |
    - imageStdWrapNoWidth.wrap =
    |
    - - # if noRows is set, wrap around each column: - imageColumnStdWrap.dataWrap =
    |
    - - layout = CASE - layout { - key.field = imageorient - # above-center - default = TEXT - default.value =
    ###IMAGES######TEXT###
    - # above-right - 1 = TEXT - 1.value =
    ###IMAGES######TEXT###
    - # above-left - 2 = TEXT - 2.value =
    ###IMAGES######TEXT###
    - # below-center - 8 = TEXT - 8.value =
    ###TEXT######IMAGES###
    - # below-right - 9 = TEXT - 9.value =
    ###TEXT######IMAGES###
    - # below-left - 10 = TEXT - 10.value =
    ###TEXT######IMAGES###
    - # intext-right - 17 = TEXT - 17.value =
    ###IMAGES######TEXT###
    - 17.override =
    ###IMAGES######TEXT###
    - 17.override.if.isTrue = {$styles.content.imgtext.addIntextClearer} - # intext-left - 18 = TEXT - 18.value =
    ###IMAGES######TEXT###
    - 18.override =
    ###IMAGES######TEXT###
    - 18.override.if.isTrue = {$styles.content.imgtext.addIntextClearer} - # intext-right-nowrap - 25 = TEXT - 25.value =
    ###IMAGES###
    ###TEXT###
    - 25.insertData = 1 - # intext-left-nowrap - 26 = TEXT - 26.value =
    ###IMAGES###
    ###TEXT###
    - 26.insertData = 1 - } - - rendering { - dl { - # Choose another rendering for special edge cases - fallbackRendering = COA - fallbackRendering { - # Just one image without a caption => don't need the dl-overhead, use the "simple" rendering - 10 = TEXT - 10 { - if { - isFalse.field = imagecaption - value = 1 - equals.data = register:imageCount - } - value = simple - } - - # Multiple images and one global caption => "ul" - 20 = TEXT - 20 { - if { - value = 1 - isGreaterThan.data = register:imageCount - isTrue.if.isTrue.data = register:renderGlobalCaption - isTrue.field = imagecaption - } - value = ul - } - - # Multiple images and no caption at all => "ul" - 30 = TEXT - 30 { - if { - value = 1 - isGreaterThan.data = register:imageCount - isFalse.field = imagecaption - } - value = ul - } - } - imageRowStdWrap.dataWrap =
    |
    - imageLastRowStdWrap.dataWrap =
    |
    - noRowsStdWrap.wrap = - oneImageStdWrap.dataWrap =
    |
    - imgTagStdWrap.wrap =
    |
    - editIconsStdWrap.wrap =
    |
    - caption { - required = 1 - wrap =
    |
    - } - } - ul { - # Just one image without a caption => don't need the ul-overhead, use the "simple" rendering - fallbackRendering < tt_content.image.20.rendering.dl.fallbackRendering.10 - imageRowStdWrap.dataWrap =
      |
    - imageLastRowStdWrap.dataWrap =
      |
    - noRowsStdWrap.wrap =
      |
    - oneImageStdWrap.dataWrap =
  • |
  • - imgTagStdWrap.wrap = - editIconsStdWrap.wrap =
    |
    - caption.wrap =
    |
    - } - div { - # Just one image without a caption => don't need the div-overhead, use the "simple" rendering - fallbackRendering < tt_content.image.20.rendering.dl.fallbackRendering.10 - imageRowStdWrap.dataWrap =
    |
    - imageLastRowStdWrap.dataWrap =
    |
    - noRowsStdWrap.wrap = - oneImageStdWrap.dataWrap =
    |
    - imgTagStdWrap.wrap =
    |
    - editIconsStdWrap.wrap =
    |
    - caption.wrap =
    |
    - } - simple { - imageRowStdWrap.dataWrap = | - imageLastRowStdWrap.dataWrap = | - noRowsStdWrap.wrap = - oneImageStdWrap.dataWrap = | - imgTagStdWrap.wrap = | - editIconsStdWrap.wrap = | - caption.wrap =
    |
    - imageStdWrap.dataWrap =
    |
    - imageStdWrapNoWidth.wrap =
    |
    - } - } - renderMethod = dl - - editIcons = tt_content : image [imageorient|imagewidth|imageheight], [imagecols|image_noRows|imageborder],[image_link|image_zoom],[image_compression|image_effects|image_frames],imagecaption[imagecaption_position] - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.images - - caption.editIcons = tt_content : imagecaption[imagecaption_position] - caption.editIcons.beforeLastTag=1 - caption.editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.caption - - stdWrap.prefixComment = 2 | Image block: -} - -# ***************** -# CType: textpic -# ***************** -tt_content.textpic = COA -tt_content.textpic { - 10 = COA - 10.if.value = 25 - 10.if.isLessThan.field = imageorient - 10.10 = < lib.stdheader - - 20 = < tt_content.image.20 - 20 { - text.10 = COA - text.10 { - if.value = 24 - if.isGreaterThan.field = imageorient - 10 = < lib.stdheader - 10.stdWrap.dataWrap =
    |
    - } - text.20 = < tt_content.text.20 - text.wrap =
    |
    - } -} - - - -# ***************** -# CType: bullet -# ***************** -tt_content.bullets = COA -tt_content.bullets { - 10 = < lib.stdheader - - 20 = TEXT - 20 { - field = bodytext - trim = 1 - split{ - token.char = 10 - cObjNum = |*|1|| 2|*| - 1.current = 1 - 1.parseFunc =< lib.parseFunc - 1.wrap =
  • |
  • - - 2.current = 1 - 2.parseFunc =< lib.parseFunc - 2.wrap =
  • |
  • - } - dataWrap =
      |
    - editIcons = tt_content: bodytext, [layout] - editIcons.beforeLastTag = 1 - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.php:eIcon.bullets - - prefixComment = 2 | Bullet list: - } -} - - - -# ***************** -# CType: table -# ***************** -# Rendered by a PHP function specifically written to handle CE tables. See css_styled_content/pi1/class.tx_cssstyledcontent_pi1.php -tt_content.table = COA -tt_content.table { - 10 = < lib.stdheader - - 20 = USER - 20.userFunc = tx_cssstyledcontent_pi1->render_table - 20.field = bodytext - - 20.color { - default = - 1 = #EDEBF1 - 2 = #F5FFAA - } - 20.tableParams_0 { - border = - cellpadding = - cellspacing = - } - 20.tableParams_1 { - border = - cellpadding = - cellspacing = - } - 20.tableParams_2 { - border = - cellpadding = - cellspacing = - } - 20.tableParams_3 { - border = - cellpadding = - cellspacing = - } - 20.innerStdWrap.wrap = | - 20.innerStdWrap.parseFunc = < lib.parseFunc - - 20.stdWrap { - editIcons = tt_content: cols, bodytext, [layout], [table_bgColor|table_border|table_cellspacing|table_cellpadding] - editIcons.beforeLastTag = 1 - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.table - - prefixComment = 2 | Table: - } -} - - -# ***************** -# CType: uploads -# ***************** -# Rendered by a PHP function specifically written to handle CE filelists. See css_styled_content/pi1/class.tx_cssstyledcontent_pi1.php -tt_content.uploads = COA -tt_content.uploads { - 10 = < lib.stdheader - - 20 = USER - 20.userFunc = tx_cssstyledcontent_pi1->render_uploads - 20.field = media - 20.filePath.field = select_key - - 20 { - # Rendering for each file (e.g. rows of the table) as a cObject - itemRendering = COA - itemRendering { - wrap =
    | |*| | || | |*| - - 10 = TEXT - 10.data = register:linkedIcon - 10.wrap = - 10.if.isPositive.field = layout - - 20 = COA - 20.wrap = - 20.1 = TEXT - 20.1 { - data = register:linkedLabel - wrap =

    |

    - } - 20.2 = TEXT - 20.2 { - data = register:description - wrap =

    |

    - required = 1 - htmlSpecialChars = 1 - } - - 30 = TEXT - 30.if.isTrue.field = filelink_size - 30.data = register:fileSize - 30.wrap = - 30.bytes = 1 - 30.bytes.labels = {$styles.content.uploads.filesizeBytesLabels} - } - useSpacesInLinkText = 0 - stripFileExtensionFromLinkText = 0 - } - - 20.color { - default = - 1 = #EDEBF1 - 2 = #F5FFAA - } - 20.tableParams_0 { - border = - cellpadding = - cellspacing = - } - 20.tableParams_1 { - border = - cellpadding = - cellspacing = - } - 20.tableParams_2 { - border = - cellpadding = - cellspacing = - } - 20.tableParams_3 { - border = - cellpadding = - cellspacing = - } - - 20.linkProc { - target = _blank - jumpurl = {$styles.content.uploads.jumpurl} - jumpurl.secure = {$styles.content.uploads.jumpurl_secure} - jumpurl.secure.mimeTypes = {$styles.content.uploads.jumpurl_secure_mimeTypes} - removePrependedNumbers = 1 - - iconCObject = IMAGE - iconCObject.file.import.data = register : ICON_REL_PATH - iconCObject.file.width = 150 - } - - 20.filesize { - bytes = 1 - bytes.labels = {$styles.content.uploads.filesizeBytesLabels} - } - - 20.stdWrap { - editIcons = tt_content: media, layout [table_bgColor|table_border|table_cellspacing|table_cellpadding], filelink_size, imagecaption - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.filelist - - prefixComment = 2 | File list: - } -} - - -# ****************** -# CType: multimedia -# ****************** -tt_content.multimedia = COA -tt_content.multimedia { - 10 = < lib.stdheader - - 20 = MULTIMEDIA - 20.file.field = multimedia - 20.file.wrap = uploads/media/ - 20.file.listNum = 0 - 20.params.field = bodytext - - 20.stdWrap { - editIcons = tt_content: multimedia, bodytext - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.multimedia - - prefixComment = 2 | Multimedia element: - } -} - -# ***************** -# CType: swfobject -# ***************** -tt_content.swfobject = COA -tt_content.swfobject { - 10 = < lib.stdheader - - 20 = SWFOBJECT - 20 { - file = - width = - height = - - flexParams.field = pi_flexform - - alternativeContent.field = bodytext - - layout = ###SWFOBJECT### - - video { - player = {$styles.content.media.videoPlayer} - - defaultWidth = {$styles.content.media.defaultVideoWidth} - defaultHeight = {$styles.content.media.defaultVideoHeight} - - default { - params.quality = high - params.menu = false - params.allowScriptAccess = sameDomain - params.allowFullScreen = true - } - mapping { - - } - } - - audio { - player = {$styles.content.media.audioPlayer} - - defaultWidth = {$styles.content.media.defaultAudioWidth} - defaultHeight = {$styles.content.media.defaultAudioHeight} - - default { - params.quality = high - params.allowScriptAccess = sameDomain - params.menu = false - } - mapping { - flashvars.file = soundFile - } - } - - } - 20.stdWrap { - editIcons = tt_content: multimedia, imagewidth, imageheight, pi_flexform, bodytext - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.multimedia - - prefixComment = 2 | SWFobject element: - } -} - -# ***************** -# CType: qtobject -# ***************** -tt_content.qtobject = COA -tt_content.qtobject { - 10 = < lib.stdheader - - 20 = QTOBJECT - 20 { - file = - width = - height = - - flexParams.field = pi_flexform - - alternativeContent.field = bodytext - - layout = ###QTOBJECT### - - video { - player = {$styles.content.media.videoPlayer} - - defaultWidth = {$styles.content.media.defaultVideoWidth} - defaultHeight = {$styles.content.media.defaultVideoHeight} - - 
default { - params.quality = high - params.menu = false - params.allowScriptAccess = sameDomain - params.allowFullScreen = true - } - mapping { - - } - } - - audio { - player = {$styles.content.media.audioPlayer} - - defaultWidth = {$styles.content.media.defaultAudioWidth} - defaultHeight = {$styles.content.media.defaultAudioHeight} - - default { - params.quality = high - params.allowScriptAccess = sameDomain - params.menu = false - } - mapping { - flashvars.file = soundFile - } - } - } - 20.stdWrap { - editIcons = tt_content: multimedia, imagewidth, imageheight, pi_flexform, bodytext - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.multimedia - - prefixComment = 2 | QTobject element: - } -} - -# ***************** -# CType: media -# ***************** -tt_content.media = COA -tt_content.media { - 10 = < lib.stdheader - - 20 = MEDIA - 20 { - - flexParams.field = pi_flexform - alternativeContent < tt_content.text.20 - alternativeContent.field = bodytext - - type = video - renderType = auto - allowEmptyUrl = 0 - forcePlayer = 1 - - fileExtHandler { - default = MEDIA - avi = MEDIA - asf = MEDIA - class = MEDIA - wmv = MEDIA - mp3 = SWF - mp4 = SWF - m4v = SWF - swa = SWF - flv = SWF - swf = SWF - mov = QT - m4v = QT - m4a = QT - } - - mimeConf.swfobject < tt_content.swfobject.20 - mimeConf.qtobject < tt_content.qtobject.20 - - } - 20.stdWrap { - editIcons = tt_content: pi_flexform, bodytext - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.multimedia - - prefixComment = 2 | Media element: - } -} - -# ****************** -# CType: mailform -# ****************** -tt_content.mailform = COA -tt_content.mailform.10 = < lib.stdheader -tt_content.mailform.20 = FORM -tt_content.mailform.20 { - accessibility = 1 - noWrapAttr=1 - formName = mailform - dontMd5FieldNames = 1 - layout =
    ###LABEL### ###FIELD###
    - labelWrap.wrap = | - commentWrap.wrap = | - radioWrap.wrap = |
    - radioWrap.accessibilityWrap = ###RADIO_GROUP_LABEL###| - REQ = 1 - REQ.labelWrap.wrap = | - COMMENT.layout =
    ###LABEL###
    - RADIO.layout =
    ###LABEL### ###FIELD###
    - LABEL.layout =
    ###LABEL### ###FIELD###
    - target = {$styles.content.mailform.target} - goodMess = {$styles.content.mailform.goodMess} - badMess = {$styles.content.mailform.badMess} - redirect.field = pages - redirect.listNum = 0 - recipient.field = subheader - data.field = bodytext - locationData = 1 - hiddenFields.stdWrap.wrap =
    |
    - - params.radio = class="csc-mailform-radio" - params.check = class="csc-mailform-check" - params.submit = class="csc-mailform-submit" - - stdWrap.wrap =
    |
    - stdWrap { - editIcons = tt_content: bodytext, pages, subheader - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.form - - prefixComment = 2 | Mail form inserted: - } -} - - -# ****************** -# CType: search -# ****************** -tt_content.search = COA -tt_content.search.10 = < lib.stdheader -# Result: -tt_content.search.20 = SEARCHRESULT -tt_content.search.20 { - allowedCols = pages.title-subtitle-keywords-description : tt_content.header-bodytext-imagecaption : tt_address.name-title-address-email-company-city-country : tt_links.title-note-note2-url : tt_board.subject-message-author-email : tt_calender.title-note : tt_products.title-note-itemnumber - languageField.tt_content = sys_language_uid - renderObj = COA - renderObj { - - 10 = TEXT - 10.field = pages_title - 10.htmlSpecialChars = 1 - 10.typolink { - parameter.field = uid - target = {$styles.content.searchresult.resultTarget} - additionalParams.data = register:SWORD_PARAMS - additionalParams.required = 1 - additionalParams.wrap = &no_cache=1 - } - 10.htmlSpecialChars = 1 - 10.wrap =

    |

    - - 20 = COA - 20 { - 10 = TEXT - 10.field = tt_content_bodytext - 10.stripHtml = 1 - 10.htmlSpecialChars = 1 - } - 20.stdWrap.crop = 200 | ... - 20.stdWrap.wrap =

    |

    - } - - layout = COA - layout { - wrap =
    |||
    |
    ###RESULT### - - 10 = TEXT - 10.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.resultRange - 10.wrap =

    |

    - - 20 = TEXT - 20.value = ###PREV###   ###NEXT### - 20.wrap =

    |

    - } - - noResultObj = COA - noResultObj { - 10 = TEXT - 10.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.emptySearch - 10.wrap =

    |

    - } - - next = TEXT - next.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.searchResultNext - - prev = TEXT - prev.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.searchResultPrev - - target = {$styles.content.searchresult.target} - range = 20 - - stdWrap.prefixComment = 2 | Search result: -} - -# Form: -tt_content.search.30 < tt_content.mailform.20 -tt_content.search.30 { - goodMess = {$styles.content.searchform.goodMess} - redirect > - recipient > - data > - dataArray { - 10.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.searchWord - 10.type = sword=input - 20.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.searchIn - 20.type = scols=select - 20.valueArray { - 10.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.headersKeywords - 10.value = pages.title-subtitle-keywords-description:tt_content.header - 20.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.pageContent - 20.value = tt_content.header-bodytext-imagecaption - } - 30.type = stype=hidden - 30.value = L0 - 40.type = submit=submit - 40.value.data = LLL:EXT:css_styled_content/pi1/locallang.xml:search.searchButton - } - type.field = pages - type.listNum = 0 - locationData = HTTP_POST_VARS - no_cache = 1 - - stdWrap.wrap = |
    - stdWrap { - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.search - - prefixComment = 2 | Search form inserted: - } -} - - -# ****************** -# CType: login -# ****************** -tt_content.login < tt_content.mailform -tt_content.login.10 = < lib.stdheader -tt_content.login.20 { - goodMess = {$styles.content.loginform.goodMess} - redirect > - recipient > - data > - dataArray { - 10.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:login.username - 10.type = *user=input - 20.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:login.password - 20.type = *pass=password - 30.type = logintype=hidden - 30.value = login - 40.type = submit=submit - 40.value.data = LLL:EXT:css_styled_content/pi1/locallang.xml:login.login - } - type.field = pages - type.listNum = 0 - target = {$styles.content.loginform.target} - locationData = 0 - hiddenFields.pid = TEXT - hiddenFields.pid { - value = {$styles.content.loginform.pid} - override.field = pages - override.listNum = 1 - } - - stdWrap.wrap =
    |
    - stdWrap { - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.login - - prefixComment = 2 | Login/Logout form: - } -} -[loginUser = *] -tt_content.login.20 { - dataArray > - dataArray { - 10.label.data = LLL:EXT:css_styled_content/pi1/locallang.xml:login.username - 10.label.wrap = |  - 30.type = logintype=hidden - 30.value = logout - 40.type = submit=submit - 40.value.data = LLL:EXT:css_styled_content/pi1/locallang.xml:login.logout - } -} -[global] - - -# ****************** -# CType: splash -# ****************** -# Deprecated element. -# Still here for backwards compliance with plugins using the "text box" type. -tt_content.splash = CASE -tt_content.splash.key.field = splash_layout -tt_content.splash.stdWrap { - prefixComment = 2 | Textbox inserted (Deprecated) -} -tt_content.splash.default = COA -tt_content.splash.default { - 20 = CTABLE - 20 { - c.1 = < tt_content.text - lm.1 = IMAGE - lm.1.file { - import = uploads/pics/ - import.field = image - import.listNum = 0 - maxW.field = imagewidth - maxW.ifEmpty = 200 - } - cMargins = 30,0,0,0 - } -} -tt_content.splash.1 < tt_content.splash.default -tt_content.splash.1.20.lm.1.file > -tt_content.splash.1.20.lm.1.file = GIFBUILDER -tt_content.splash.1.20.lm.1.file { - XY = [10.w]+10,[10.h]+10 - backColor = {$content.splash.bgCol} - backColor.override.data = register:pageColor - format = jpg - 5 = BOX - 5.dimensions = 3,3,[10.w],[10.h] - 5.color = #333333 - 7 = EFFECT - 7.value = blur=99|blur=99|blur=99|blur=99|blur=99|blur=99|blur=99 - 10 = IMAGE - 10.file { - import = uploads/pics/ - import.field = image - import.listNum = 0 - maxW.field = imagewidth - maxW.ifEmpty = 200 - } -} -// The image frames are not available unless TypoScript code from styles.content.imgFrames.x is provided manually: -tt_content.splash.2 < tt_content.splash.default -#tt_content.splash.2.20.lm.1.file.m < styles.content.imgFrames.1 -tt_content.splash.3 < tt_content.splash.default -#tt_content.splash.3.20.lm.1.file.m < styles.content.imgFrames.2 - -// From plugin.postit1, if included: -tt_content.splash.20 = < plugin.postit1 - - - -# **************** -# CType: menu -# **************** -tt_content.menu = COA -tt_content.menu { - 10 = < lib.stdheader - - 20 = CASE - 20 { - key.field = menu_type - - # "Menu of these pages" - default = HMENU - default { - special = list - special.value.field = pages - wrap =
      |
    - 1 = TMENU - 1 { - target = {$PAGE_TARGET} - NO { - stdWrap.htmlSpecialChars = 1 - wrapItemAndSub =
  • |
  • - ATagTitle.field = description // title - } - noBlur = 1 - } - } - - # "Menu of subpages to these pages" - 1 < .default - 1 { - special = directory - wrap =
      |
    - } - - # "Sitemap - liststyle" - 2 = HMENU - 2 { - wrap =
    |
    - 1 = TMENU - 1 { - target = {$PAGE_TARGET} - noBlur = 1 - expAll = 1 - wrap =
      |
    - NO { - stdWrap.htmlSpecialChars = 1 - wrapItemAndSub =
  • |
  • - ATagTitle.field = description // title - } - } - 2 < .1 - 3 < .1 - 4 < .1 - 5 < .1 - 6 < .1 - 7 < .1 - } - - # "Section index (pagecontent w/Index checked - liststyle)" - 3 < styles.content.get - 3 { - wrap =
      |
    - select.andWhere = sectionIndex!=0 - select.pidInList.override.field = pages - renderObj = TEXT - renderObj { - fieldRequired = header - trim = 1 - field = header - htmlSpecialChars = 1 - noBlur = 1 - wrap =
  • |
  • - typolink.parameter.field = pid - typolink.section.field = uid - } - } - - # "Menu of subpages to these pages (with abstract)" - 4 < .1 - 4 { - wrap =
    |
    - 1.NO { - wrapItemAndSub > - linkWrap =
    |
    - after { - data = field : abstract // field : description // field : subtitle - required = 1 - htmlSpecialChars = 1 - wrap =
    |
    - } - ATagTitle.field = description // title - } - } - - # "Recently updated pages" - 5 < .default - 5 { - wrap =
      |
    - special = updated - special { - maxAge = 3600*24*7 - excludeNoSearchPages = 1 - } - } - - # "Related pages (based on keywords)" - 6 < .default - 6 { - wrap =
      |
    - special = keywords - special { - excludeNoSearchPages = 1 - } - } - - # "Menu of subpages to these pages + sections - liststyle" - 7 < .1 - 7 { - wrap =
      |
    - 1.expAll = 1 - 2 < .1 - 2 { - sectionIndex = 1 - sectionIndex.type = header - wrap =
      |
    - NO.wrapItemAndSub =
  • |
  • - } - } - } - - 20.stdWrap { - editIcons = tt_content: menu_type, pages - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.menuSitemap - - prefixComment = 2 | Menu/Sitemap element: - } -} - - - -# **************** -# CType: shortcut -# **************** -# Should be a complete copy from the old static template "content (default)" -tt_content.shortcut = COA -tt_content.shortcut { - 20 = CASE - 20.key.field = layout - 20.0= RECORDS - 20.0 { - source.field = records - tables = {$content.shortcut.tables} - # THESE are OLD plugins. Modern plugins registers themselves automatically! - conf.tt_content = < tt_content - conf.tt_address = < tt_address - conf.tt_links = < tt_links - conf.tt_guest = < tt_guest - conf.tt_board = < tt_board - conf.tt_calender = < tt_calender - conf.tt_rating < tt_rating - conf.tt_products = < tt_products - conf.tt_news = < tt_news - conf.tt_poll = < plugin.tt_poll - } - 20.1= RECORDS - 20.1 { - source.field = records - tables = {$content.shortcut.tables} - conf.tt_poll = < plugin.tt_poll - conf.tt_poll.code = RESULT,SUBMITTEDVOTE - } - - 20.stdWrap { - editIcons = tt_content: records - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.recordList - - prefixComment = 2 | Inclusion of other records (by reference): - } -} - - -# **************** -# CType: list -# **************** -# Should be a complete copy from the old static template "content (default)" (except "lib.stdheader") -tt_content.list = COA -tt_content.list { - 10 = < lib.stdheader - - 20 = CASE - 20.key.field = list_type - 20 { - # LIST element references (NOT copy of objects!) - # THESE are OLD plugins. Modern plugins registers themselves automatically! - 3 = CASE - 3.key.field = layout - 3.0 = < plugin.tt_guest - - 4 = CASE - 4.key.field = layout - 4.0 = < plugin.tt_board_list - 4.1 = < plugin.tt_board_tree - - 2 = CASE - 2.key.field = layout - 2.0 = < plugin.tt_board_tree - - 5 = CASE - 5.key.field = layout - 5.0 = < plugin.tt_products - - 7 = CASE - 7.key.field = layout - 7.0 = < plugin.tt_calender - - 8 = CASE - 8.key.field = layout - 8.0 = < plugin.tt_rating - - 9 = CASE - 9.key.field = layout - 9.0 = < plugin.tt_news - - 11 = CASE - 11.key.field = layout - 11.0 = < plugin.tipafriend - - 20 = CASE - 20.key.field = layout - 20.0 = < plugin.feadmin.fe_users - - 21 = CASE - 21.key.field = layout - 21.0 = < plugin.feadmin.dmailsubscription - } - - 20.stdWrap { - editIcons = tt_content: list_type, layout, select_key, pages [recursive] - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.plugin - - prefixComment = 2 | Plugin inserted: - } -} - - -# **************** -# CType: script -# **************** -# OBSOLETE! Please make extensions instead. The "script" content element was meant for these custom purposes in the past. Today extensions will do the job better. -tt_content.script = TEXT -tt_content.script { - value = - - prefixComment = 2 | Script element (Deprecated) -} - - -# **************** -# CType: div -# **************** -tt_content.div = TEXT -tt_content.div { - value =
    - wrap =
    |
    - prefixComment = 2 | Div element -} - - -# **************** -# CType: html -# **************** -# This truely IS a content object, launched from inside the PHP class of course. -# Should be a complete copy from the old static template "content (default)" -tt_content.html = TEXT -tt_content.html { - field = bodytext - - editIcons = tt_content: pages - editIcons.iconTitle.data = LLL:EXT:css_styled_content/pi1/locallang.xml:eIcon.html - - prefixComment = 2 | Raw HTML content: -} - - -# **************** -# Default error msg: -# **************** -tt_content.default = TEXT -tt_content.default { - field = CType - wrap =

    ERROR: Content Element type "|" has no rendering definition!

    - - prefixComment = 2 | Unknown element message: -} - -# ********************************************************************* -# ACCESSIBILTY MODE -# ********************************************************************* - - - - - - - -plugin.tx_cssstyledcontent._CSS_DEFAULT_STYLE ( - /* Captions */ - DIV.csc-textpic-caption-c .csc-textpic-caption { text-align: center; } - DIV.csc-textpic-caption-r .csc-textpic-caption { text-align: right; } - DIV.csc-textpic-caption-l .csc-textpic-caption { text-align: left; } - - /* Needed for noRows setting */ - DIV.csc-textpic DIV.csc-textpic-imagecolumn { float: left; display: inline; } - - /* Border just around the image */ - {$styles.content.imgtext.borderSelector} { - border: {$styles.content.imgtext.borderThick}px solid {$styles.content.imgtext.borderColor}; - padding: {$styles.content.imgtext.borderSpace}px {$styles.content.imgtext.borderSpace}px; - } - - DIV.csc-textpic-imagewrap { padding: 0; } - - DIV.csc-textpic IMG { border: none; } - - /* DIV: This will place the images side by side */ - DIV.csc-textpic DIV.csc-textpic-imagewrap DIV.csc-textpic-image { float: left; } - - /* UL: This will place the images side by side */ - DIV.csc-textpic DIV.csc-textpic-imagewrap UL { list-style: none; margin: 0; padding: 0; } - DIV.csc-textpic DIV.csc-textpic-imagewrap UL LI { float: left; margin: 0; padding: 0; } - - /* DL: This will place the images side by side */ - DIV.csc-textpic DIV.csc-textpic-imagewrap DL.csc-textpic-image { float: left; } - DIV.csc-textpic DIV.csc-textpic-imagewrap DL.csc-textpic-image DT { float: none; } - DIV.csc-textpic DIV.csc-textpic-imagewrap DL.csc-textpic-image DD { float: none; } - DIV.csc-textpic DIV.csc-textpic-imagewrap DL.csc-textpic-image DD IMG { border: none; } /* FE-Editing Icons */ - DL.csc-textpic-image { margin: 0; } - DL.csc-textpic-image DT { margin: 0; display: inline; } - DL.csc-textpic-image DD { margin: 0; } - - /* Clearer */ - DIV.csc-textpic-clear { clear: both; } - - /* Margins around images: */ - - /* Pictures on left, add margin on right */ - DIV.csc-textpic-left DIV.csc-textpic-imagewrap .csc-textpic-image, - DIV.csc-textpic-intext-left-nowrap DIV.csc-textpic-imagewrap .csc-textpic-image, - DIV.csc-textpic-intext-left DIV.csc-textpic-imagewrap .csc-textpic-image { - display: inline; /* IE fix for double-margin bug */ - margin-right: {$styles.content.imgtext.colSpace}px; - } - - /* Pictures on right, add margin on left */ - DIV.csc-textpic-right DIV.csc-textpic-imagewrap .csc-textpic-image, - DIV.csc-textpic-intext-right-nowrap DIV.csc-textpic-imagewrap .csc-textpic-image, - DIV.csc-textpic-intext-right DIV.csc-textpic-imagewrap .csc-textpic-image { - display: inline; /* IE fix for double-margin bug */ - margin-left: {$styles.content.imgtext.colSpace}px; - } - - /* Pictures centered, add margin on left */ - DIV.csc-textpic-center DIV.csc-textpic-imagewrap .csc-textpic-image { - display: inline; /* IE fix for double-margin bug */ - margin-left: {$styles.content.imgtext.colSpace}px; - } - DIV.csc-textpic DIV.csc-textpic-imagewrap .csc-textpic-image .csc-textpic-caption { margin: 0; } - DIV.csc-textpic DIV.csc-textpic-imagewrap .csc-textpic-image IMG { margin: 0; vertical-align:bottom; } - - /* Space below each image (also in-between rows) */ - DIV.csc-textpic DIV.csc-textpic-imagewrap .csc-textpic-image { margin-bottom: {$styles.content.imgtext.rowSpace}px; } - DIV.csc-textpic-equalheight DIV.csc-textpic-imagerow { margin-bottom: {$styles.content.imgtext.rowSpace}px; display: block; } - 
DIV.csc-textpic DIV.csc-textpic-imagerow { clear: both; } - DIV.csc-textpic DIV.csc-textpic-single-image IMG { margin-bottom: {$styles.content.imgtext.rowSpace}px; } - - /* IE7 hack for margin between image rows */ - *+html DIV.csc-textpic DIV.csc-textpic-imagerow .csc-textpic-image { margin-bottom: 0; } - *+html DIV.csc-textpic DIV.csc-textpic-imagerow { margin-bottom: {$styles.content.imgtext.rowSpace}px; } - - /* No margins around the whole image-block */ - DIV.csc-textpic DIV.csc-textpic-imagewrap .csc-textpic-firstcol { margin-left: 0px !important; } - DIV.csc-textpic DIV.csc-textpic-imagewrap .csc-textpic-lastcol { margin-right: 0px !important; } - - /* Add margin from image-block to text (in case of "Text w/ images") */ - DIV.csc-textpic-intext-left DIV.csc-textpic-imagewrap, - DIV.csc-textpic-intext-left-nowrap DIV.csc-textpic-imagewrap { - margin-right: {$styles.content.imgtext.textMargin}px !important; - } - DIV.csc-textpic-intext-right DIV.csc-textpic-imagewrap, - DIV.csc-textpic-intext-right-nowrap DIV.csc-textpic-imagewrap { - margin-left: {$styles.content.imgtext.textMargin}px !important; - } - - /* Positioning of images: */ - - /* Above */ - DIV.csc-textpic-above DIV.csc-textpic-text { clear: both; } - - /* Center (above or below) */ - DIV.csc-textpic-center { text-align: center; /* IE-hack */ } - DIV.csc-textpic-center DIV.csc-textpic-imagewrap { margin: 0 auto; } - DIV.csc-textpic-center DIV.csc-textpic-imagewrap .csc-textpic-image { text-align: left; /* Remove IE-hack */ } - DIV.csc-textpic-center DIV.csc-textpic-text { text-align: left; /* Remove IE-hack */ } - - /* Right (above or below) */ - DIV.csc-textpic-right DIV.csc-textpic-imagewrap { float: right; } - DIV.csc-textpic-right DIV.csc-textpic-text { clear: right; } - - /* Left (above or below) */ - DIV.csc-textpic-left DIV.csc-textpic-imagewrap { float: left; } - DIV.csc-textpic-left DIV.csc-textpic-text { clear: left; } - - /* Left (in text) */ - DIV.csc-textpic-intext-left DIV.csc-textpic-imagewrap { float: left; } - - /* Right (in text) */ - DIV.csc-textpic-intext-right DIV.csc-textpic-imagewrap { float: right; } - - /* Right (in text, no wrap around) */ - DIV.csc-textpic-intext-right-nowrap DIV.csc-textpic-imagewrap { float: right; clear: both; } - /* Hide from IE5-mac. Only IE-win sees this. \*/ - * html DIV.csc-textpic-intext-right-nowrap .csc-textpic-text { height: 1%; } - /* End hide from IE5/mac */ - - /* Left (in text, no wrap around) */ - DIV.csc-textpic-intext-left-nowrap DIV.csc-textpic-imagewrap { float: left; clear: both; } - /* Hide from IE5-mac. Only IE-win sees this. \*/ - * html DIV.csc-textpic-intext-left-nowrap .csc-textpic-text, - * html .csc-textpic-intext-left ol, - * html .csc-textpic-intext-left ul { height: 1%; } - /* End hide from IE5/mac */ - - DIV.csc-textpic DIV.csc-textpic-imagerow-last { margin-bottom: 0; } - - /* Browser fixes: */ - - /* Fix for unordered and ordered list with image "In text, left" */ - .csc-textpic-intext-left ol, .csc-textpic-intext-left ul {padding-left: 40px; overflow: auto; } -) - -# TYPO3 SVN ID: $Id$ - diff --git a/tests/examplefiles/underscore.coffee b/tests/examplefiles/underscore.coffee deleted file mode 100644 index a34a1ce8..00000000 --- a/tests/examplefiles/underscore.coffee +++ /dev/null @@ -1,603 +0,0 @@ - - # Underscore.coffee - # (c) 2010 Jeremy Ashkenas, DocumentCloud Inc. - # Underscore is freely distributable under the terms of the MIT license. 
- # Portions of Underscore are inspired by or borrowed from Prototype.js, - # Oliver Steele's Functional, and John Resig's Micro-Templating. - # For all details and documentation: - # http://documentcloud.github.com/underscore/ - - - # ------------------------- Baseline setup --------------------------------- - - # Establish the root object, "window" in the browser, or "global" on the server. - root: this - - - # Save the previous value of the "_" variable. - previousUnderscore: root._ - - - # If Underscore is called as a function, it returns a wrapped object that - # can be used OO-style. This wrapper holds altered versions of all the - # underscore functions. Wrapped objects may be chained. - wrapper: (obj) -> - this._wrapped: obj - this - - - # Establish the object that gets thrown to break out of a loop iteration. - breaker: if typeof(StopIteration) is 'undefined' then '__break__' else StopIteration - - - # Create a safe reference to the Underscore object forreference below. - _: root._: (obj) -> new wrapper(obj) - - - # Export the Underscore object for CommonJS. - if typeof(exports) != 'undefined' then exports._: _ - - - # Create quick reference variables for speed access to core prototypes. - slice: Array::slice - unshift: Array::unshift - toString: Object::toString - hasOwnProperty: Object::hasOwnProperty - propertyIsEnumerable: Object::propertyIsEnumerable - - - # Current version. - _.VERSION: '0.5.7' - - - # ------------------------ Collection Functions: --------------------------- - - # The cornerstone, an each implementation. - # Handles objects implementing forEach, arrays, and raw objects. - _.each: (obj, iterator, context) -> - index: 0 - try - return obj.forEach(iterator, context) if obj.forEach - if _.isArray(obj) or _.isArguments(obj) - return iterator.call(context, obj[i], i, obj) for i in [0...obj.length] - iterator.call(context, val, key, obj) for key, val of obj - catch e - throw e if e isnt breaker - obj - - - # Return the results of applying the iterator to each element. Use JavaScript - # 1.6's version of map, if possible. - _.map: (obj, iterator, context) -> - return obj.map(iterator, context) if (obj and _.isFunction(obj.map)) - results: [] - _.each obj, (value, index, list) -> - results.push(iterator.call(context, value, index, list)) - results - - - # Reduce builds up a single result from a list of values. Also known as - # inject, or foldl. Uses JavaScript 1.8's version of reduce, if possible. - _.reduce: (obj, memo, iterator, context) -> - return obj.reduce(_.bind(iterator, context), memo) if (obj and _.isFunction(obj.reduce)) - _.each obj, (value, index, list) -> - memo: iterator.call(context, memo, value, index, list) - memo - - - # The right-associative version of reduce, also known as foldr. Uses - # JavaScript 1.8's version of reduceRight, if available. - _.reduceRight: (obj, memo, iterator, context) -> - return obj.reduceRight(_.bind(iterator, context), memo) if (obj and _.isFunction(obj.reduceRight)) - _.each _.clone(_.toArray(obj)).reverse(), (value, index) -> - memo: iterator.call(context, memo, value, index, obj) - memo - - - # Return the first value which passes a truth test. - _.detect: (obj, iterator, context) -> - result: null - _.each obj, (value, index, list) -> - if iterator.call(context, value, index, list) - result: value - _.breakLoop() - result - - - # Return all the elements that pass a truth test. Use JavaScript 1.6's - # filter(), if it exists. 
- _.select: (obj, iterator, context) -> - if obj and _.isFunction(obj.filter) then return obj.filter(iterator, context) - results: [] - _.each obj, (value, index, list) -> - results.push(value) if iterator.call(context, value, index, list) - results - - - # Return all the elements for which a truth test fails. - _.reject: (obj, iterator, context) -> - results: [] - _.each obj, (value, index, list) -> - results.push(value) if not iterator.call(context, value, index, list) - results - - - # Determine whether all of the elements match a truth test. Delegate to - # JavaScript 1.6's every(), if it is present. - _.all: (obj, iterator, context) -> - iterator ||= _.identity - return obj.every(iterator, context) if obj and _.isFunction(obj.every) - result: true - _.each obj, (value, index, list) -> - _.breakLoop() unless (result: result and iterator.call(context, value, index, list)) - result - - - # Determine if at least one element in the object matches a truth test. Use - # JavaScript 1.6's some(), if it exists. - _.any: (obj, iterator, context) -> - iterator ||= _.identity - return obj.some(iterator, context) if obj and _.isFunction(obj.some) - result: false - _.each obj, (value, index, list) -> - _.breakLoop() if (result: iterator.call(context, value, index, list)) - result - - - # Determine if a given value is included in the array or object, - # based on '==='. - _.include: (obj, target) -> - return _.indexOf(obj, target) isnt -1 if _.isArray(obj) - for key, val of obj - return true if val is target - false - - - # Invoke a method with arguments on every item in a collection. - _.invoke: (obj, method) -> - args: _.rest(arguments, 2) - (if method then val[method] else val).apply(val, args) for val in obj - - - # Convenience version of a common use case of map: fetching a property. - _.pluck: (obj, key) -> - _.map(obj, ((val) -> val[key])) - - - # Return the maximum item or (item-based computation). - _.max: (obj, iterator, context) -> - return Math.max.apply(Math, obj) if not iterator and _.isArray(obj) - result: {computed: -Infinity} - _.each obj, (value, index, list) -> - computed: if iterator then iterator.call(context, value, index, list) else value - computed >= result.computed and (result: {value: value, computed: computed}) - result.value - - - # Return the minimum element (or element-based computation). - _.min: (obj, iterator, context) -> - return Math.min.apply(Math, obj) if not iterator and _.isArray(obj) - result: {computed: Infinity} - _.each obj, (value, index, list) -> - computed: if iterator then iterator.call(context, value, index, list) else value - computed < result.computed and (result: {value: value, computed: computed}) - result.value - - - # Sort the object's values by a criteria produced by an iterator. - _.sortBy: (obj, iterator, context) -> - _.pluck(((_.map obj, (value, index, list) -> - {value: value, criteria: iterator.call(context, value, index, list)} - ).sort((left, right) -> - a: left.criteria; b: right.criteria - if a < b then -1 else if a > b then 1 else 0 - )), 'value') - - - # Use a comparator function to figure out at what index an object should - # be inserted so as to maintain order. Uses binary search. - _.sortedIndex: (array, obj, iterator) -> - iterator ||= _.identity - low: 0; high: array.length - while low < high - mid: (low + high) >> 1 - if iterator(array[mid]) < iterator(obj) then low: mid + 1 else high: mid - low - - - # Convert anything iterable into a real, live array. 
- _.toArray: (iterable) -> - return [] if (!iterable) - return iterable.toArray() if (iterable.toArray) - return iterable if (_.isArray(iterable)) - return slice.call(iterable) if (_.isArguments(iterable)) - _.values(iterable) - - - # Return the number of elements in an object. - _.size: (obj) -> _.toArray(obj).length - - - # -------------------------- Array Functions: ------------------------------ - - # Get the first element of an array. Passing "n" will return the first N - # values in the array. Aliased as "head". The "guard" check allows it to work - # with _.map. - _.first: (array, n, guard) -> - if n and not guard then slice.call(array, 0, n) else array[0] - - - # Returns everything but the first entry of the array. Aliased as "tail". - # Especially useful on the arguments object. Passing an "index" will return - # the rest of the values in the array from that index onward. The "guard" - # check allows it to work with _.map. - _.rest: (array, index, guard) -> - slice.call(array, if _.isUndefined(index) or guard then 1 else index) - - - # Get the last element of an array. - _.last: (array) -> array[array.length - 1] - - - # Trim out all falsy values from an array. - _.compact: (array) -> array[i] for i in [0...array.length] when array[i] - - - # Return a completely flattened version of an array. - _.flatten: (array) -> - _.reduce array, [], (memo, value) -> - return memo.concat(_.flatten(value)) if _.isArray(value) - memo.push(value) - memo - - - # Return a version of the array that does not contain the specified value(s). - _.without: (array) -> - values: _.rest(arguments) - val for val in _.toArray(array) when not _.include(values, val) - - - # Produce a duplicate-free version of the array. If the array has already - # been sorted, you have the option of using a faster algorithm. - _.uniq: (array, isSorted) -> - memo: [] - for el, i in _.toArray(array) - memo.push(el) if i is 0 || (if isSorted is true then _.last(memo) isnt el else not _.include(memo, el)) - memo - - - # Produce an array that contains every item shared between all the - # passed-in arrays. - _.intersect: (array) -> - rest: _.rest(arguments) - _.select _.uniq(array), (item) -> - _.all rest, (other) -> - _.indexOf(other, item) >= 0 - - - # Zip together multiple lists into a single array -- elements that share - # an index go together. - _.zip: -> - length: _.max(_.pluck(arguments, 'length')) - results: new Array(length) - for i in [0...length] - results[i]: _.pluck(arguments, String(i)) - results - - - # If the browser doesn't supply us with indexOf (I'm looking at you, MSIE), - # we need this function. Return the position of the first occurence of an - # item in an array, or -1 if the item is not included in the array. - _.indexOf: (array, item) -> - return array.indexOf(item) if array.indexOf - i: 0; l: array.length - while l - i - if array[i] is item then return i else i++ - -1 - - - # Provide JavaScript 1.6's lastIndexOf, delegating to the native function, - # if possible. - _.lastIndexOf: (array, item) -> - return array.lastIndexOf(item) if array.lastIndexOf - i: array.length - while i - if array[i] is item then return i else i-- - -1 - - - # Generate an integer Array containing an arithmetic progression. A port of - # the native Python range() function. 
See: - # http://docs.python.org/library/functions.html#range - _.range: (start, stop, step) -> - a: arguments - solo: a.length <= 1 - i: start: if solo then 0 else a[0]; - stop: if solo then a[0] else a[1]; - step: a[2] or 1 - len: Math.ceil((stop - start) / step) - return [] if len <= 0 - range: new Array(len) - idx: 0 - while true - return range if (if step > 0 then i - stop else stop - i) >= 0 - range[idx]: i - idx++ - i+= step - - - # ----------------------- Function Functions: ----------------------------- - - # Create a function bound to a given object (assigning 'this', and arguments, - # optionally). Binding with arguments is also known as 'curry'. - _.bind: (func, obj) -> - args: _.rest(arguments, 2) - -> func.apply(obj or root, args.concat(arguments)) - - - # Bind all of an object's methods to that object. Useful for ensuring that - # all callbacks defined on an object belong to it. - _.bindAll: (obj) -> - funcs: if arguments.length > 1 then _.rest(arguments) else _.functions(obj) - _.each(funcs, (f) -> obj[f]: _.bind(obj[f], obj)) - obj - - - # Delays a function for the given number of milliseconds, and then calls - # it with the arguments supplied. - _.delay: (func, wait) -> - args: _.rest(arguments, 2) - setTimeout((-> func.apply(func, args)), wait) - - - # Defers a function, scheduling it to run after the current call stack has - # cleared. - _.defer: (func) -> - _.delay.apply(_, [func, 1].concat(_.rest(arguments))) - - - # Returns the first function passed as an argument to the second, - # allowing you to adjust arguments, run code before and after, and - # conditionally execute the original function. - _.wrap: (func, wrapper) -> - -> wrapper.apply(wrapper, [func].concat(arguments)) - - - # Returns a function that is the composition of a list of functions, each - # consuming the return value of the function that follows. - _.compose: -> - funcs: arguments - -> - args: arguments - for i in [(funcs.length - 1)..0] - args: [funcs[i].apply(this, args)] - args[0] - - - # ------------------------- Object Functions: ---------------------------- - - # Retrieve the names of an object's properties. - _.keys: (obj) -> - return _.range(0, obj.length) if _.isArray(obj) - key for key, val of obj - - - # Retrieve the values of an object's properties. - _.values: (obj) -> - _.map(obj, _.identity) - - - # Return a sorted list of the function names available in Underscore. - _.functions: (obj) -> - _.select(_.keys(obj), (key) -> _.isFunction(obj[key])).sort() - - - # Extend a given object with all of the properties in a source object. - _.extend: (destination, source) -> - for key, val of source - destination[key]: val - destination - - - # Create a (shallow-cloned) duplicate of an object. - _.clone: (obj) -> - return obj.slice(0) if _.isArray(obj) - _.extend({}, obj) - - - # Invokes interceptor with the obj, and then returns obj. - # The primary purpose of this method is to "tap into" a method chain, in order to perform operations on intermediate results within the chain. - _.tap: (obj, interceptor) -> - interceptor(obj) - obj - - - # Perform a deep comparison to check if two objects are equal. - _.isEqual: (a, b) -> - # Check object identity. - return true if a is b - # Different types? - atype: typeof(a); btype: typeof(b) - return false if atype isnt btype - # Basic equality test (watch out for coercions). - return true if `a == b` - # One is falsy and the other truthy. - return false if (!a and b) or (a and !b) - # One of them implements an isEqual()? 
- return a.isEqual(b) if a.isEqual - # Check dates' integer values. - return a.getTime() is b.getTime() if _.isDate(a) and _.isDate(b) - # Both are NaN? - return true if _.isNaN(a) and _.isNaN(b) - # Compare regular expressions. - if _.isRegExp(a) and _.isRegExp(b) - return a.source is b.source and - a.global is b.global and - a.ignoreCase is b.ignoreCase and - a.multiline is b.multiline - # If a is not an object by this point, we can't handle it. - return false if atype isnt 'object' - # Check for different array lengths before comparing contents. - return false if a.length and (a.length isnt b.length) - # Nothing else worked, deep compare the contents. - aKeys: _.keys(a); bKeys: _.keys(b) - # Different object sizes? - return false if aKeys.length isnt bKeys.length - # Recursive comparison of contents. - # for (var key in a) if (!_.isEqual(a[key], b[key])) return false; - return true - - - # Is a given array or object empty? - _.isEmpty: (obj) -> _.keys(obj).length is 0 - - - # Is a given value a DOM element? - _.isElement: (obj) -> obj and obj.nodeType is 1 - - - # Is a given value an array? - _.isArray: (obj) -> !!(obj and obj.concat and obj.unshift) - - - # Is a given variable an arguments object? - _.isArguments: (obj) -> obj and _.isNumber(obj.length) and not obj.concat and - not obj.substr and not obj.apply and not propertyIsEnumerable.call(obj, 'length') - - - # Is the given value a function? - _.isFunction: (obj) -> !!(obj and obj.constructor and obj.call and obj.apply) - - - # Is the given value a string? - _.isString: (obj) -> !!(obj is '' or (obj and obj.charCodeAt and obj.substr)) - - - # Is a given value a number? - _.isNumber: (obj) -> (obj is +obj) or toString.call(obj) is '[object Number]' - - - # Is a given value a Date? - _.isDate: (obj) -> !!(obj and obj.getTimezoneOffset and obj.setUTCFullYear) - - - # Is the given value a regular expression? - _.isRegExp: (obj) -> !!(obj and obj.exec and (obj.ignoreCase or obj.ignoreCase is false)) - - - # Is the given value NaN -- this one is interesting. NaN != NaN, and - # isNaN(undefined) == true, so we make sure it's a number first. - _.isNaN: (obj) -> _.isNumber(obj) and window.isNaN(obj) - - - # Is a given value equal to null? - _.isNull: (obj) -> obj is null - - - # Is a given variable undefined? - _.isUndefined: (obj) -> typeof obj is 'undefined' - - - # -------------------------- Utility Functions: -------------------------- - - # Run Underscore.js in noConflict mode, returning the '_' variable to its - # previous owner. Returns a reference to the Underscore object. - _.noConflict: -> - root._: previousUnderscore - this - - - # Keep the identity function around for default iterators. - _.identity: (value) -> value - - - # Break out of the middle of an iteration. - _.breakLoop: -> throw breaker - - - # Generate a unique integer id (unique within the entire client session). - # Useful for temporary DOM ids. - idCounter: 0 - _.uniqueId: (prefix) -> - (prefix or '') + idCounter++ - - - # By default, Underscore uses ERB-style template delimiters, change the - # following template settings to use alternative delimiters. - _.templateSettings: { - start: '<%' - end: '%>' - interpolate: /<%=(.+?)%>/g - } - - - # JavaScript templating a-la ERB, pilfered from John Resig's - # "Secrets of the JavaScript Ninja", page 83. - # Single-quotea fix from Rick Strahl's version. 
- _.template: (str, data) -> - c: _.templateSettings - fn: new Function 'obj', - 'var p=[],print=function(){p.push.apply(p,arguments);};' + - 'with(obj){p.push(\'' + - str.replace(/[\r\t\n]/g, " ") - .replace(new RegExp("'(?=[^"+c.end[0]+"]*"+c.end+")","g"),"\t") - .split("'").join("\\'") - .split("\t").join("'") - .replace(c.interpolate, "',$1,'") - .split(c.start).join("');") - .split(c.end).join("p.push('") + - "');}return p.join('');" - if data then fn(data) else fn - - - # ------------------------------- Aliases ---------------------------------- - - _.forEach: _.each - _.foldl: _.inject: _.reduce - _.foldr: _.reduceRight - _.filter: _.select - _.every: _.all - _.some: _.any - _.head: _.first - _.tail: _.rest - _.methods: _.functions - - - # /*------------------------ Setup the OOP Wrapper: --------------------------*/ - - # Helper function to continue chaining intermediate results. - result: (obj, chain) -> - if chain then _(obj).chain() else obj - - - # Add all of the Underscore functions to the wrapper object. - _.each _.functions(_), (name) -> - method: _[name] - wrapper.prototype[name]: -> - unshift.call(arguments, this._wrapped) - result(method.apply(_, arguments), this._chain) - - - # Add all mutator Array functions to the wrapper. - _.each ['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], (name) -> - method: Array.prototype[name] - wrapper.prototype[name]: -> - method.apply(this._wrapped, arguments) - result(this._wrapped, this._chain) - - - # Add all accessor Array functions to the wrapper. - _.each ['concat', 'join', 'slice'], (name) -> - method: Array.prototype[name] - wrapper.prototype[name]: -> - result(method.apply(this._wrapped, arguments), this._chain) - - - # Start chaining a wrapped Underscore object. - wrapper::chain: -> - this._chain: true - this - - - # Extracts the result from a wrapped and chained object. - wrapper::value: -> this._wrapped diff --git a/tests/examplefiles/unicode.applescript b/tests/examplefiles/unicode.applescript deleted file mode 100644 index 8cc6c6fb..00000000 --- a/tests/examplefiles/unicode.applescript +++ /dev/null @@ -1,5 +0,0 @@ -set jp to "日本語" - -set ru to "Русский" - -jp & " and " & ru -- returns "日本語 and Русский" diff --git a/tests/examplefiles/unicode.go b/tests/examplefiles/unicode.go deleted file mode 100644 index d4bef4d1..00000000 --- a/tests/examplefiles/unicode.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -import "fmt" - -func main() { - 世界 := "Hello, world!" - さようなら := "Goodbye, world!" 
- fmt.Println(世界) - fmt.Println(さようなら) -} diff --git a/tests/examplefiles/unicode.js b/tests/examplefiles/unicode.js deleted file mode 100644 index 8f553f6f..00000000 --- a/tests/examplefiles/unicode.js +++ /dev/null @@ -1,6 +0,0 @@ -var école; -var sinθ; -var เมือง; -var a\u1234b; - -var nbsp; diff --git a/tests/examplefiles/unicodedoc.py b/tests/examplefiles/unicodedoc.py deleted file mode 100644 index 9d3db0c8..00000000 --- a/tests/examplefiles/unicodedoc.py +++ /dev/null @@ -1,11 +0,0 @@ -def foo(): - ur"""unicode-raw""" - -def bar(): - u"""unicode""" - -def baz(): - r'raw' - -def zap(): - """docstring""" diff --git a/tests/examplefiles/unix-io.lid b/tests/examplefiles/unix-io.lid deleted file mode 100644 index 617fcaa4..00000000 --- a/tests/examplefiles/unix-io.lid +++ /dev/null @@ -1,37 +0,0 @@ -Library: io -Synopsis: A portable IO library -Author: Gail Zacharias -Files: library - streams/defs - streams/stream - streams/sequence-stream - streams/native-buffer - streams/buffer - streams/typed-stream - streams/external-stream - streams/buffered-stream - streams/convenience - streams/wrapper-stream - streams/cleanup-streams - streams/native-speed - streams/async-writes - streams/file-stream - streams/multi-buffered-streams - pprint - print - print-double-integer-kludge - format - buffered-format - format-condition - unix-file-accessor - unix-standard-io - unix-interface - format-out -C-Source-Files: unix-portability.c -Major-Version: 2 -Minor-Version: 1 -Target-Type: dll -Copyright: Original Code is Copyright (c) 1995-2004 Functional Objects, Inc. - All rights reserved. -License: See License.txt in this distribution for details. -Warranty: Distributed WITHOUT WARRANTY OF ANY KIND diff --git a/tests/examplefiles/varnish.vcl b/tests/examplefiles/varnish.vcl deleted file mode 100644 index 6258c313..00000000 --- a/tests/examplefiles/varnish.vcl +++ /dev/null @@ -1,187 +0,0 @@ -# This is the VCL configuration Varnish will automatically append to your VCL -# file during compilation/loading. See the vcl(7) man page for details on syntax -# and semantics. -# New users is recommended to use the example.vcl file as a starting point. - -vcl 4.0; - -backend foo { .host = "192.168.1.1"; } - -probe blatti { .url = "foo"; } -probe fooy { - .url = "beh"; - -} - -acl foo { - "192.168.1.1"; - "192.168.0.0"/24; - ! "192.168.0.1"; -} - -include "foo.vcl"; - -import std; - -sub vcl_init { - new b = director.foo(); -} - -sub vcl_recv { - ban(req.url ~ "foo"); - rollback(); -} -sub vcl_recv { - if (req.method == "PRI") { - /* We do not support SPDY or HTTP/2.0 */ - return (synth(405)); - } - if (req.method != "GET" && - req.method != "HEAD" && - req.method != "PUT" && - req.method != "POST" && - req.method != "TRACE" && - req.method != "OPTIONS" && - req.method != "DELETE") { - /* Non-RFC2616 or CONNECT which is weird. */ - return (pipe); - } - - if (req.method != "GET" && req.method != "HEAD") { - /* We only deal with GET and HEAD by default */ - return (pass); - } - if (req.http.Authorization || req.http.Cookie) { - /* Not cacheable by default */ - return (pass); - } - return (hash); -} - -sub vcl_pipe { - # By default Connection: close is set on all piped requests, to stop - # connection reuse from sending future requests directly to the - # (potentially) wrong backend. If you do want this to happen, you can undo - # it here. 
- # unset bereq.http.connection; - return (pipe); -} - -sub vcl_pass { - return (fetch); -} - -sub vcl_hash { - hash_data(req.url); - if (req.http.host) { - hash_data(req.http.host); - } else { - hash_data(server.ip); - } - return (lookup); -} - -sub vcl_purge { - return (synth(200, "Purged")); -} - -sub vcl_hit { - if (obj.ttl >= 0s) { - // A pure unadultered hit, deliver it - return (deliver); - } - if (obj.ttl + obj.grace > 0s) { - // Object is in grace, deliver it - // Automatically triggers a background fetch - return (deliver); - } - // fetch & deliver once we get the result - return (miss); -} - -sub vcl_miss { - return (fetch); -} - -sub vcl_deliver { - set resp.http.x-storage = storage.s0.free; - return (deliver); -} - -/* - * We can come here "invisibly" with the following errors: 413, 417 & 503 - */ -sub vcl_synth { - set resp.http.Content-Type = "text/html; charset=utf-8"; - set resp.http.Retry-After = "5"; - synthetic( {" - - - "} + resp.status + " " + resp.reason + {" - - -

    Error "} + resp.status + " " + resp.reason + {"

    -

    "} + resp.reason + {"

    -

    Guru Meditation:

    -

    XID: "} + req.xid + {"

    -
    -

    Varnish cache server

    - - -"} ); - return (deliver); -} - -####################################################################### -# Backend Fetch - -sub vcl_backend_fetch { - return (fetch); -} - -sub vcl_backend_response { - if (beresp.ttl <= 0s || - beresp.http.Set-Cookie || - beresp.http.Surrogate-control ~ "no-store" || - (!beresp.http.Surrogate-Control && - beresp.http.Cache-Control ~ "no-cache|no-store|private") || - beresp.http.Vary == "*") { - /* - * Mark as "Hit-For-Pass" for the next 2 minutes - */ - set beresp.ttl = 120s; - set beresp.uncacheable = true; - } - return (deliver); -} - -sub vcl_backend_error { - set beresp.http.Content-Type = "text/html; charset=utf-8"; - set beresp.http.Retry-After = "5"; - synthetic( {" - - - "} + beresp.status + " " + beresp.reason + {" - - -

    Error "} + beresp.status + " " + beresp.reason + {"

    -

    "} + beresp.reason + {"

    -

    Guru Meditation:

    -

    XID: "} + bereq.xid + {"

    -
    -

    Varnish cache server

    - - -"} ); - return (deliver); -} - -####################################################################### -# Housekeeping - -sub vcl_init { -} - -sub vcl_fini { - return (ok); -} diff --git a/tests/examplefiles/vbnet_test.bas b/tests/examplefiles/vbnet_test.bas deleted file mode 100644 index af5f2574..00000000 --- a/tests/examplefiles/vbnet_test.bas +++ /dev/null @@ -1,29 +0,0 @@ -Public Class Form1 - Inherits System.Windows.Forms.Form - - Private t As New System.Timers.Timer(2000) - - Private Sub Form1_Load(ByVal sender As Object, _ - ByVal e As System.EventArgs) Handles MyBase.Load - - AddHandler t.Elapsed, AddressOf TimerFired - End Sub - - Private Sub btnStart_Click(ByVal sender As System.Object, _ - ByVal e As System.EventArgs) Handles btnStart.Click - - t.Enabled = True - End Sub - - Private Sub btnStop_Click(ByVal sender As System.Object, _ - ByVal e As System.EventArgs) Handles btnStop.Click - - t.Enabled = False - End Sub - - Public Sub TimerFired(ByVal sender As Object, _ - ByVal e As System.Timers.ElapsedEventArgs) - - Label1.Text = "Signal Time = " & e.SignalTime.ToString - End Sub -End Class diff --git a/tests/examplefiles/vctreestatus_hg b/tests/examplefiles/vctreestatus_hg deleted file mode 100644 index 193ed803..00000000 --- a/tests/examplefiles/vctreestatus_hg +++ /dev/null @@ -1,4 +0,0 @@ -M LICENSE -M setup.py -! setup.cfg -? vctreestatus_hg diff --git a/tests/examplefiles/vimrc b/tests/examplefiles/vimrc deleted file mode 100644 index d2f9cd1b..00000000 --- a/tests/examplefiles/vimrc +++ /dev/null @@ -1,21 +0,0 @@ -" A comment - -:py print "py" -::pyt print 'pyt' - pyth print '''pyth''' - : pytho print "pytho" -python print """python""" - - : : python<`, a lexer is a class that is -initialized with some keyword arguments (the lexer options) and that provides a -:meth:`.get_tokens_unprocessed()` method which is given a string or unicode -object with the data to [-parse.-] {+lex.+} - -The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable -containing tuples in the form ``(index, token, value)``. Normally you don't -need to do this since there are [-numerous-] base lexers {+that do most of the work and that+} -you can subclass. - - -RegexLexer -========== - -[-A very powerful (but quite easy to use)-] - -{+The+} lexer {+base class used by almost all of Pygments' lexers+} is the -:class:`RegexLexer`. This -[-lexer base-] class allows you to define lexing rules in terms of -*regular expressions* for different *states*. - -States are groups of regular expressions that are matched against the input -string at the *current position*. If one of these expressions matches, a -corresponding action is performed [-(normally-] {+(such as+} yielding a token with a specific -[-type),-] -{+type, or changing state),+} the current position is set to where the last match -ended and the matching process continues with the first regex of the current -state. - -Lexer states are kept [-in-] {+on+} a [-state-] stack: each time a new state is entered, the new -state is pushed onto the stack. The most basic lexers (like the `DiffLexer`) -just need one state. - -Each state is defined as a list of tuples in the form (`regex`, `action`, -`new_state`) where the last item is optional. In the most basic form, `action` -is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a -token with the match text and type `tokentype` and push `new_state` on the state -stack. If the new state is ``'#pop'``, the topmost state is popped from the -stack instead. 
[-(To-] {+To+} pop more than one state, use ``'#pop:2'`` and so [-on.)-] {+on.+} -``'#push'`` is a synonym for pushing the current state on the stack. - -The following example shows the `DiffLexer` from the builtin lexers. Note that -it contains some additional attributes `name`, `aliases` and `filenames` which -aren't required for a lexer. They are used by the builtin lexer lookup -functions. - -[-.. sourcecode:: python-] {+::+} - - from pygments.lexer import RegexLexer - from pygments.token import * - - class DiffLexer(RegexLexer): - name = 'Diff' - aliases = ['diff'] - filenames = ['*.diff'] - - tokens = { - 'root': [ - (r' .*\n', Text), - (r'\+.*\n', Generic.Inserted), - (r'-.*\n', Generic.Deleted), - (r'@.*\n', Generic.Subheading), - (r'Index.*\n', Generic.Heading), - (r'=.*\n', Generic.Heading), - (r'.*\n', Text), - ] - } - -As you can see this lexer only uses one state. When the lexer starts scanning -the text, it first checks if the current character is a space. If this is true -it scans everything until newline and returns the [-parsed-] data as {+a+} `Text` [-token.-] {+token (which -is the "no special highlighting" token).+} - -If this rule doesn't match, it checks if the current char is a plus sign. And -so on. - -If no rule matches at the current position, the current char is emitted as an -`Error` token that indicates a [-parsing-] {+lexing+} error, and the position is increased by -[-1.-] -{+one.+} - - -Adding and testing a new lexer -============================== - -To make [-pygments-] {+Pygments+} aware of your new lexer, you have to perform the following -steps: - -First, change to the current directory containing the [-pygments-] {+Pygments+} source code: - -.. [-sourcecode::-] {+code-block::+} console - - $ cd .../pygments-main - -{+Select a matching module under ``pygments/lexers``, or create a new module for -your lexer class.+} - -Next, make sure the lexer is known from outside of the module. All modules in -the ``pygments.lexers`` specify ``__all__``. For example, [-``other.py`` sets: - -.. sourcecode:: python-] {+``esoteric.py`` sets::+} - - __all__ = ['BrainfuckLexer', 'BefungeLexer', ...] - -Simply add the name of your lexer class to this list. - -Finally the lexer can be made [-publically-] {+publicly+} known by rebuilding the lexer mapping: - -.. [-sourcecode::-] {+code-block::+} console - - $ make mapfiles - -To test the new lexer, store an example file with the proper extension in -``tests/examplefiles``. For example, to test your ``DiffLexer``, add a -``tests/examplefiles/example.diff`` containing a sample diff output. - -Now you can use pygmentize to render your example to HTML: - -.. [-sourcecode::-] {+code-block::+} console - - $ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff - -Note that this [-explicitely-] {+explicitly+} calls the ``pygmentize`` in the current directory -by preceding it with ``./``. This ensures your modifications are used. -Otherwise a possibly already installed, unmodified version without your new -lexer would have been called from the system search path (``$PATH``). - -To view the result, open ``/tmp/example.html`` in your browser. - -Once the example renders as expected, you should run the complete test suite: - -.. 
[-sourcecode::-] {+code-block::+} console - - $ make test - -{+It also tests that your lexer fulfills the lexer API and certain invariants, -such as that the concatenation of all token text is the same as the input text.+} - - -Regex Flags -=========== - -You can either define regex flags {+locally+} in the regex (``r'(?x)foo bar'``) or -{+globally+} by adding a `flags` attribute to your lexer class. If no attribute is -defined, it defaults to `re.MULTILINE`. For more [-informations-] {+information+} about regular -expression flags see the {+page about+} `regular expressions`_ [-help page-] in the [-python-] {+Python+} -documentation. - -.. _regular expressions: [-http://docs.python.org/lib/re-syntax.html-] {+http://docs.python.org/library/re.html#regular-expression-syntax+} - - -Scanning multiple tokens at once -================================ - -{+So far, the `action` element in the rule tuple of regex, action and state has -been a single token type. Now we look at the first of several other possible -values.+} - -Here is a more complex lexer that highlights INI files. INI files consist of -sections, comments and [-key-] {+``key+} = [-value pairs: - -.. sourcecode:: python-] {+value`` pairs::+} - - from pygments.lexer import RegexLexer, bygroups - from pygments.token import * - - class IniLexer(RegexLexer): - name = 'INI' - aliases = ['ini', 'cfg'] - filenames = ['*.ini', '*.cfg'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r';.*?$', Comment), - (r'\[.*?\]$', Keyword), - (r'(.*?)(\s*)(=)(\s*)(.*?)$', - bygroups(Name.Attribute, Text, Operator, Text, String)) - ] - } - -The lexer first looks for whitespace, comments and section names. [-And later-] {+Later+} it -looks for a line that looks like a key, value pair, separated by an ``'='`` -sign, and optional whitespace. - -The `bygroups` helper [-makes sure that-] {+yields+} each {+capturing+} group [-is yielded-] {+in the regex+} with a different -token type. First the `Name.Attribute` token, then a `Text` token for the -optional whitespace, after that a `Operator` token for the equals sign. Then a -`Text` token for the whitespace again. The rest of the line is returned as -`String`. - -Note that for this to work, every part of the match must be inside a capturing -group (a ``(...)``), and there must not be any nested capturing groups. If you -nevertheless need a group, use a non-capturing group defined using this syntax: -[-``r'(?:some|words|here)'``-] -{+``(?:some|words|here)``+} (note the ``?:`` after the beginning parenthesis). - -If you find yourself needing a capturing group inside the regex which shouldn't -be part of the output but is used in the regular expressions for backreferencing -(eg: ``r'(<(foo|bar)>)(.*?)()'``), you can pass `None` to the bygroups -function and [-it will skip-] that group will be skipped in the output. - - -Changing states -=============== - -Many lexers need multiple states to work as expected. For example, some -languages allow multiline comments to be nested. Since this is a recursive -pattern it's impossible to lex just using regular expressions. - -Here is [-the solution: - -.. 
sourcecode:: python-] {+a lexer that recognizes C++ style comments (multi-line with ``/* */`` -and single-line with ``//`` until end of line)::+} - - from pygments.lexer import RegexLexer - from pygments.token import * - - class [-ExampleLexer(RegexLexer):-] {+CppCommentLexer(RegexLexer):+} - name = 'Example Lexer with states' - - tokens = { - 'root': [ - (r'[^/]+', Text), - (r'/\*', Comment.Multiline, 'comment'), - (r'//.*?$', Comment.Singleline), - (r'/', Text) - ], - 'comment': [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ] - } - -This lexer starts lexing in the ``'root'`` state. It tries to match as much as -possible until it finds a slash (``'/'``). If the next character after the slash -is [-a star-] {+an asterisk+} (``'*'``) the `RegexLexer` sends those two characters to the -output stream marked as `Comment.Multiline` and continues [-parsing-] {+lexing+} with the rules -defined in the ``'comment'`` state. - -If there wasn't [-a star-] {+an asterisk+} after the slash, the `RegexLexer` checks if it's a -[-singleline-] -{+Singleline+} comment [-(eg:-] {+(i.e.+} followed by a second slash). If this also wasn't the -case it must be a single [-slash-] {+slash, which is not a comment starter+} (the separate -regex for a single slash must also be given, else the slash would be marked as -an error token). - -Inside the ``'comment'`` state, we do the same thing again. Scan until the -lexer finds a star or slash. If it's the opening of a multiline comment, push -the ``'comment'`` state on the stack and continue scanning, again in the -``'comment'`` state. Else, check if it's the end of the multiline comment. If -yes, pop one state from the stack. - -Note: If you pop from an empty stack you'll get an `IndexError`. (There is an -easy way to prevent this from happening: don't ``'#pop'`` in the root state). - -If the `RegexLexer` encounters a newline that is flagged as an error token, the -stack is emptied and the lexer continues scanning in the ``'root'`` state. This -[-helps-] -{+can help+} producing error-tolerant highlighting for erroneous input, e.g. when a -single-line string is not closed. - - -Advanced state tricks -===================== - -There are a few more things you can do with states: - -- You can push multiple states onto the stack if you give a tuple instead of a - simple string as the third item in a rule tuple. For example, if you want to - match a comment containing a directive, something [-like::-] {+like: - - .. code-block:: text+} - - /* rest of comment */ - - you can use this [-rule: - - .. sourcecode:: python-] {+rule::+} - - tokens = { - 'root': [ - (r'/\* <', Comment, ('comment', 'directive')), - ... - ], - 'directive': [ - (r'[^>]*', Comment.Directive), - (r'>', Comment, '#pop'), - ], - 'comment': [ - (r'[^*]+', Comment), - (r'\*/', Comment, '#pop'), - (r'\*', Comment), - ] - } - - When this encounters the above sample, first ``'comment'`` and ``'directive'`` - are pushed onto the stack, then the lexer continues in the directive state - until it finds the closing ``>``, then it continues in the comment state until - the closing ``*/``. Then, both states are popped from the stack again and - lexing continues in the root state. - - .. versionadded:: 0.9 - The tuple can contain the special ``'#push'`` and ``'#pop'`` (but not - ``'#pop:n'``) directives. - - -- You can include the rules of a state in the definition of another. 
This is - done by using `include` from [-`pygments.lexer`: - - .. sourcecode:: python-] {+`pygments.lexer`::+} - - from pygments.lexer import RegexLexer, bygroups, include - from pygments.token import * - - class ExampleLexer(RegexLexer): - tokens = { - 'comments': [ - (r'/\*.*?\*/', Comment), - (r'//.*?\n', Comment), - ], - 'root': [ - include('comments'), - (r'(function )(\w+)( {)', - bygroups(Keyword, Name, Keyword), 'function'), - (r'.', Text), - ], - 'function': [ - (r'[^}/]+', Text), - include('comments'), - (r'/', Text), - [-(r'}',-] - {+(r'\}',+} Keyword, '#pop'), - ] - } - - This is a hypothetical lexer for a language that consist of functions and - comments. Because comments can occur at toplevel and in functions, we need - rules for comments in both states. As you can see, the `include` helper saves - repeating rules that occur more than once (in this example, the state - ``'comment'`` will never be entered by the lexer, as it's only there to be - included in ``'root'`` and ``'function'``). - -- Sometimes, you may want to "combine" a state from existing ones. This is - possible with the [-`combine`-] {+`combined`+} helper from `pygments.lexer`. - - If you, instead of a new state, write ``combined('state1', 'state2')`` as the - third item of a rule tuple, a new anonymous state will be formed from state1 - and state2 and if the rule matches, the lexer will enter this state. - - This is not used very often, but can be helpful in some cases, such as the - `PythonLexer`'s string literal processing. - -- If you want your lexer to start lexing in a different state you can modify the - stack by [-overloading-] {+overriding+} the `get_tokens_unprocessed()` [-method: - - .. sourcecode:: python-] {+method::+} - - from pygments.lexer import RegexLexer - - class [-MyLexer(RegexLexer):-] {+ExampleLexer(RegexLexer):+} - tokens = {...} - - def get_tokens_unprocessed(self, [-text): - stack = ['root', 'otherstate']-] {+text, stack=('root', 'otherstate')):+} - for item in RegexLexer.get_tokens_unprocessed(text, stack): - yield item - - Some lexers like the `PhpLexer` use this to make the leading ``', Name.Tag), - ], - 'script-content': [ - (r'(.+?)(<\s*/\s*script\s*>)', - bygroups(using(JavascriptLexer), Name.Tag), - '#pop'), - ] - } - -Here the content of a ```` end tag is processed by the `JavascriptLexer`, -while the end tag is yielded as a normal token with the `Name.Tag` type. - -[-As an additional goodie, if the lexer class is replaced by `this` (imported from -`pygments.lexer`), the "other" lexer will be the current one (because you cannot -refer to the current class within the code that runs at class definition time).-] - -Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule. -Here, two states are pushed onto the state stack, ``'script-content'`` and -``'tag'``. That means that first ``'tag'`` is processed, which will [-parse-] {+lex+} -attributes and the closing ``>``, then the ``'tag'`` state is popped and the -next state on top of the stack will be ``'script-content'``. - -{+Since you cannot refer to the class currently being defined, use `this` -(imported from `pygments.lexer`) to refer to the current lexer class, i.e. -``using(this)``. 
This construct may seem unnecessary, but this is often the -most obvious way of lexing arbitrary syntax between fixed delimiters without -introducing deeply nested states.+} - -The `using()` helper has a special keyword argument, `state`, which works as -follows: if given, the lexer to use initially is not in the ``"root"`` state, -but in the state given by this argument. This [-*only* works-] {+does not work+} with [-a `RegexLexer`.-] {+advanced -`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below).+} - -Any other keywords arguments passed to `using()` are added to the keyword -arguments used to create the lexer. - - -Delegating Lexer -================ - -Another approach for nested lexers is the `DelegatingLexer` which is for example -used for the template engine lexers. It takes two lexers as arguments on -initialisation: a `root_lexer` and a `language_lexer`. - -The input is processed as follows: First, the whole text is lexed with the -`language_lexer`. All tokens yielded with [-a-] {+the special+} type of ``Other`` are -then concatenated and given to the `root_lexer`. The language tokens of the -`language_lexer` are then inserted into the `root_lexer`'s token stream at the -appropriate positions. - -[-.. sourcecode:: python-] {+::+} - - from pygments.lexer import DelegatingLexer - from pygments.lexers.web import HtmlLexer, PhpLexer - - class HtmlPhpLexer(DelegatingLexer): - def __init__(self, **options): - super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options) - -This procedure ensures that e.g. HTML with template tags in it is highlighted -correctly even if the template tags are put into HTML tags or attributes. - -If you want to change the needle token ``Other`` to something else, you can give -the lexer another token type as the third [-parameter: - -.. sourcecode:: python-] {+parameter::+} - - DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options) - - -Callbacks -========= - -Sometimes the grammar of a language is so complex that a lexer would be unable -to [-parse-] {+process+} it just by using regular expressions and stacks. - -For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead -of token types (`bygroups` and `using` are nothing else but preimplemented -callbacks). The callback must be a function taking two arguments: - -* the lexer itself -* the match object for the last matched rule - -The callback must then return an iterable of (or simply yield) ``(index, -tokentype, value)`` tuples, which are then just passed through by -`get_tokens_unprocessed()`. The ``index`` here is the position of the token in -the input string, ``tokentype`` is the normal token type (like `Name.Builtin`), -and ``value`` the associated part of the input string. - -You can see an example [-here: - -.. sourcecode:: python-] {+here::+} - - from pygments.lexer import RegexLexer - from pygments.token import Generic - - class HypotheticLexer(RegexLexer): - - def headline_callback(lexer, match): - equal_signs = match.group(1) - text = match.group(2) - yield match.start(), Generic.Headline, equal_signs + text + equal_signs - - tokens = { - 'root': [ - (r'(=+)(.*?)(\1)', headline_callback) - ] - } - -If the regex for the `headline_callback` matches, the function is called with -the match object. Note that after the callback is done, processing continues -normally, that is, after the end of the previous match. The callback has no -possibility to influence the position. 
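For a quick check that such a callback actually fires, the hypothetical ``HypotheticLexer`` from the excerpt above can be instantiated and run over a sample string; the class is repeated here only so the sketch is self-contained, and the sample input and expected output are illustrative assumptions rather than anything taken from the original file::

    from pygments.lexer import RegexLexer
    from pygments.token import Generic

    class HypotheticLexer(RegexLexer):
        # Same callback-based rule as in the excerpt above.
        def headline_callback(lexer, match):
            equal_signs = match.group(1)
            text = match.group(2)
            yield match.start(), Generic.Headline, equal_signs + text + equal_signs

        tokens = {
            'root': [
                (r'(=+)(.*?)(\1)', headline_callback),
            ],
        }

    # get_tokens() drops the index from the (index, token, value) tuples that
    # get_tokens_unprocessed() yields, leaving (tokentype, value) pairs.
    for tokentype, value in HypotheticLexer().get_tokens("== Heading =="):
        print(tokentype, repr(value))

On a current Pygments install this should print a ``Generic.Headline`` token for ``'== Heading =='``, followed by a plain text/whitespace token for the trailing newline that ``get_tokens()`` appends.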
- -There are not really any simple examples for lexer callbacks, but you can see -them in action e.g. in the [-`compiled.py`_ source code-] {+`SMLLexer` class+} in [-the `CLexer` and -`JavaLexer` classes.-] {+`ml.py`_.+} - -.. [-_compiled.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/compiled.py-] {+_ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py+} - - -The ExtendedRegexLexer class -============================ - -The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for -the funky syntax rules of [-some-] languages [-that will go unnamed,-] such as Ruby. - -But fear not; even then you don't have to abandon the regular expression -[-approach. For-] -{+approach:+} Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`. -All features known from RegexLexers are available here too, and the tokens are -specified in exactly the same way, *except* for one detail: - -The `get_tokens_unprocessed()` method holds its internal state data not as local -variables, but in an instance of the `pygments.lexer.LexerContext` class, and -that instance is passed to callbacks as a third argument. This means that you -can modify the lexer state in callbacks. - -The `LexerContext` class has the following members: - -* `text` -- the input text -* `pos` -- the current starting position that is used for matching regexes -* `stack` -- a list containing the state stack -* `end` -- the maximum position to which regexes are matched, this defaults to - the length of `text` - -Additionally, the `get_tokens_unprocessed()` method can be given a -`LexerContext` instead of a string and will then process this context instead of -creating a new one for the string argument. - -Note that because you can set the current position to anything in the callback, -it won't be automatically be set by the caller after the callback is finished. -For example, this is how the hypothetical lexer above would be written with the -[-`ExtendedRegexLexer`: - -.. sourcecode:: python-] -{+`ExtendedRegexLexer`::+} - - from pygments.lexer import ExtendedRegexLexer - from pygments.token import Generic - - class ExHypotheticLexer(ExtendedRegexLexer): - - def headline_callback(lexer, match, ctx): - equal_signs = match.group(1) - text = match.group(2) - yield match.start(), Generic.Headline, equal_signs + text + equal_signs - ctx.pos = match.end() - - tokens = { - 'root': [ - (r'(=+)(.*?)(\1)', headline_callback) - ] - } - -This might sound confusing (and it can really be). But it is needed, and for an -example look at the Ruby lexer in [-`agile.py`_.-] {+`ruby.py`_.+} - -.. [-_agile.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/agile.py - - -Filtering-] {+_ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py - - -Handling Lists of Keywords -========================== - -For a relatively short list (hundreds) you can construct an optimized regular -expression directly using ``words()`` (longer lists, see next section). This -function handles a few things for you automatically, including escaping -metacharacters and Python's first-match rather than longest-match in -alternations. Feel free to put the lists themselves in -``pygments/lexers/_$lang_builtins.py`` (see examples there), and generated by -code if possible. 
- -An example of using ``words()`` is something like:: - - from pygments.lexer import RegexLexer, words, Name - - class MyLexer(RegexLexer): - - tokens = { - 'root': [ - (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin), - (r'\w+', Name), - ], - } - -As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed -regex. - - -Modifying+} Token Streams -======================= - -Some languages ship a lot of builtin functions (for example PHP). The total -amount of those functions differs from system to system because not everybody -has every extension installed. In the case of PHP there are over 3000 builtin -functions. That's an [-incredible-] {+incredibly+} huge amount of functions, much more than you -[-can-] -{+want to+} put into a regular expression. - -But because only `Name` tokens can be function names [-it's-] {+this is+} solvable by -overriding the ``get_tokens_unprocessed()`` method. The following lexer -subclasses the `PythonLexer` so that it highlights some additional names as -pseudo [-keywords: - -.. sourcecode:: python-] {+keywords::+} - - from [-pygments.lexers.agile-] {+pygments.lexers.python+} import PythonLexer - from pygments.token import Name, Keyword - - class MyPythonLexer(PythonLexer): - EXTRA_KEYWORDS = [-['foo',-] {+set(('foo',+} 'bar', 'foobar', 'barfoo', 'spam', [-'eggs']-] {+'eggs'))+} - - def get_tokens_unprocessed(self, text): - for index, token, value in PythonLexer.get_tokens_unprocessed(self, text): - if token is Name and value in self.EXTRA_KEYWORDS: - yield index, Keyword.Pseudo, value - else: - yield index, token, value - -The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions. - -[-.. note:: Do not confuse this with the :doc:`filter ` system.-] diff --git a/tests/examplefiles/wdiff_example3.wdiff b/tests/examplefiles/wdiff_example3.wdiff deleted file mode 100644 index 0bbd6d65..00000000 --- a/tests/examplefiles/wdiff_example3.wdiff +++ /dev/null @@ -1,10 +0,0 @@ -This example is unbalanced open-close. -We can't treat these easily. - -{+ added? -] -[- deleted? +} - -suddenly closed -] -suddenly closed +} - -{+ added? [- deleted? diff --git a/tests/examplefiles/webkit-transition.css b/tests/examplefiles/webkit-transition.css deleted file mode 100644 index a20b7112..00000000 --- a/tests/examplefiles/webkit-transition.css +++ /dev/null @@ -1,3 +0,0 @@ -p { - -webkit-transition: opacity 1s linear; -} diff --git a/tests/examplefiles/while.pov b/tests/examplefiles/while.pov deleted file mode 100644 index fb182454..00000000 --- a/tests/examplefiles/while.pov +++ /dev/null @@ -1,13 +0,0 @@ -#declare Index1 = 0; -#while(Index1 <= 9) - - #declare Index2 = 0; - #while(Index2 <= 19) - - sphere { , .5 } - - #declare Index2 = Index2 + 1; - #end - - #declare Index1 = Index1 + 1; -#end diff --git a/tests/examplefiles/wiki.factor b/tests/examplefiles/wiki.factor deleted file mode 100644 index d046e91c..00000000 --- a/tests/examplefiles/wiki.factor +++ /dev/null @@ -1,384 +0,0 @@ -! Copyright (C) 2008 Slava Pestov -! See http://factorcode.org/license.txt for BSD license. 
-USING: accessors kernel hashtables calendar random assocs -namespaces make splitting sequences sorting math.order present -io.files io.directories io.encodings.ascii -syndication farkup -html.components html.forms -http.server -http.server.dispatchers -furnace.actions -furnace.utilities -furnace.redirection -furnace.auth -furnace.auth.login -furnace.boilerplate -furnace.syndication -validators -db.types db.tuples lcs urls ; -IN: webapps.wiki - -: wiki-url ( rest path -- url ) - [ "$wiki/" % % "/" % present % ] "" make - swap >>path ; - -: view-url ( title -- url ) "view" wiki-url ; - -: edit-url ( title -- url ) "edit" wiki-url ; - -: revisions-url ( title -- url ) "revisions" wiki-url ; - -: revision-url ( id -- url ) "revision" wiki-url ; - -: user-edits-url ( author -- url ) "user-edits" wiki-url ; - -TUPLE: wiki < dispatcher ; - -SYMBOL: can-delete-wiki-articles? - -can-delete-wiki-articles? define-capability - -TUPLE: article title revision ; - -article "ARTICLES" { - { "title" "TITLE" { VARCHAR 256 } +not-null+ +user-assigned-id+ } - { "revision" "REVISION" INTEGER +not-null+ } ! revision id -} define-persistent - -:
    ( title -- article ) article new swap >>title ; - -TUPLE: revision id title author date content description ; - -revision "REVISIONS" { - { "id" "ID" INTEGER +db-assigned-id+ } - { "title" "TITLE" { VARCHAR 256 } +not-null+ } ! article id - { "author" "AUTHOR" { VARCHAR 256 } +not-null+ } ! uid - { "date" "DATE" TIMESTAMP +not-null+ } - { "content" "CONTENT" TEXT +not-null+ } - { "description" "DESCRIPTION" TEXT } -} define-persistent - -M: revision feed-entry-title - [ title>> ] [ drop " by " ] [ author>> ] tri 3append ; - -M: revision feed-entry-date date>> ; - -M: revision feed-entry-url id>> revision-url ; - -: reverse-chronological-order ( seq -- sorted ) - [ date>> ] inv-sort-with ; - -: ( id -- revision ) - revision new swap >>id ; - -: validate-title ( -- ) - { { "title" [ v-one-line ] } } validate-params ; - -: validate-author ( -- ) - { { "author" [ v-username ] } } validate-params ; - -: ( responder -- responder' ) - - { wiki "page-common" } >>template ; - -: ( -- action ) - - [ "Front Page" view-url ] >>display ; - -: latest-revision ( title -- revision/f ) -
    select-tuple - dup [ revision>> select-tuple ] when ; - -: ( -- action ) - - - "title" >>rest - - [ validate-title ] >>init - - [ - "title" value dup latest-revision [ - from-object - { wiki "view" } - ] [ - edit-url - ] ?if - ] >>display - - ; - -: ( -- action ) - - - "id" >>rest - - [ - validate-integer-id - "id" value - select-tuple from-object - ] >>init - - { wiki "view" } >>template - - ; - -: ( -- action ) - - [ - article new select-tuples random - [ title>> ] [ "Front Page" ] if* - view-url - ] >>display ; - -: amend-article ( revision article -- ) - swap id>> >>revision update-tuple ; - -: add-article ( revision -- ) - [ title>> ] [ id>> ] bi article boa insert-tuple ; - -: add-revision ( revision -- ) - [ insert-tuple ] - [ - dup title>>
    select-tuple - [ amend-article ] [ add-article ] if* - ] - bi ; - -: ( -- action ) - - - "title" >>rest - - [ - validate-title - - "title" value
    select-tuple - [ revision>> select-tuple ] - [ f "title" value >>title ] - if* - - [ title>> "title" set-value ] - [ content>> "content" set-value ] - bi - ] >>init - - { wiki "edit" } >>template - - ; - -: ( -- action ) - - [ - validate-title - - { - { "content" [ v-required ] } - { "description" [ [ v-one-line ] v-optional ] } - } validate-params - - f - "title" value >>title - now >>date - username >>author - "content" value >>content - "description" value >>description - [ add-revision ] [ title>> view-url ] bi - ] >>submit - - - "edit wiki articles" >>description ; - -: ( responder -- responder ) - - { wiki "revisions-common" } >>template ; - -: list-revisions ( -- seq ) - f "title" value >>title select-tuples - reverse-chronological-order ; - -: ( -- action ) - - - "title" >>rest - - [ - validate-title - list-revisions "revisions" set-value - ] >>init - - { wiki "revisions" } >>template - - - ; - -: ( -- action ) - - - "title" >>rest - - [ validate-title ] >>init - - [ "Revisions of " "title" value append ] >>title - - [ "title" value revisions-url ] >>url - - [ list-revisions ] >>entries ; - -: rollback-description ( description -- description' ) - [ "Rollback of '" "'" surround ] [ "Rollback" ] if* ; - -: ( -- action ) - - - [ validate-integer-id ] >>validate - - [ - "id" value select-tuple - f >>id - now >>date - username >>author - [ rollback-description ] change-description - [ add-revision ] - [ title>> revisions-url ] bi - ] >>submit - - - "rollback wiki articles" >>description ; - -: list-changes ( -- seq ) - f select-tuples - reverse-chronological-order ; - -: ( -- action ) - - [ list-changes "revisions" set-value ] >>init - { wiki "changes" } >>template - - ; - -: ( -- action ) - - [ URL" $wiki/changes" ] >>url - [ "All changes" ] >>title - [ list-changes ] >>entries ; - -: ( -- action ) - - - [ validate-title ] >>validate - - [ - "title" value
    delete-tuples - f "title" value >>title delete-tuples - URL" $wiki" - ] >>submit - - - "delete wiki articles" >>description - { can-delete-wiki-articles? } >>capabilities ; - -: ( -- action ) - - - [ - { - { "old-id" [ v-integer ] } - { "new-id" [ v-integer ] } - } validate-params - - "old-id" "new-id" - [ value select-tuple ] bi@ - [ - over title>> "title" set-value - [ "old" [ from-object ] nest-form ] - [ "new" [ from-object ] nest-form ] - bi* - ] - [ [ content>> string-lines ] bi@ diff "diff" set-value ] - 2bi - ] >>init - - { wiki "diff" } >>template - - ; - -: ( -- action ) - - - [ - f
    select-tuples - [ title>> ] sort-with - "articles" set-value - ] >>init - - { wiki "articles" } >>template ; - -: list-user-edits ( -- seq ) - f "author" value >>author select-tuples - reverse-chronological-order ; - -: ( -- action ) - - - "author" >>rest - - [ - validate-author - list-user-edits "revisions" set-value - ] >>init - - { wiki "user-edits" } >>template - - ; - -: ( -- action ) - - "author" >>rest - [ validate-author ] >>init - [ "Edits by " "author" value append ] >>title - [ "author" value user-edits-url ] >>url - [ list-user-edits ] >>entries ; - -: init-sidebars ( -- ) - "Contents" latest-revision [ "contents" [ from-object ] nest-form ] when* - "Footer" latest-revision [ "footer" [ from-object ] nest-form ] when* ; - -: init-relative-link-prefix ( -- ) - URL" $wiki/view/" adjust-url present relative-link-prefix set ; - -: ( -- dispatcher ) - wiki new-dispatcher - "" add-responder - "view" add-responder - "revision" add-responder - "random" add-responder - "revisions" add-responder - "revisions.atom" add-responder - "diff" add-responder - "edit" add-responder - "submit" add-responder - "rollback" add-responder - "user-edits" add-responder - "articles" add-responder - "changes" add-responder - "user-edits.atom" add-responder - "changes.atom" add-responder - "delete" add-responder - - [ init-sidebars init-relative-link-prefix ] >>init - { wiki "wiki-common" } >>template ; - -: init-wiki ( -- ) - "resource:extra/webapps/wiki/initial-content" [ - [ - dup ".txt" ?tail [ - swap ascii file-contents - f - swap >>content - swap >>title - "slava" >>author - now >>date - add-revision - ] [ 2drop ] if - ] each - ] with-directory-files ; \ No newline at end of file diff --git a/tests/examplefiles/xml_example b/tests/examplefiles/xml_example deleted file mode 100644 index e657e564..00000000 --- a/tests/examplefiles/xml_example +++ /dev/null @@ -1,1897 +0,0 @@ - - - - - - abort - abs - abstract - accept - access - aliased - all - and - array - at - begin - body - constant - declare - delay - delta - digits - do - else - elsif - end - entry - exception - exit - for - function - generic - goto - in - is - limited - mod - new - not - null - of - or - others - out - package - pragma - private - procedure - protected - raise - range - rem - record - renames - requeue - return - reverse - separate - subtype - tagged - task - terminate - then - type - until - use - when - while - with - xor - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - BEGIN - END - if - else - while - do - for - in - continue - break - print - printf - getline - function - return - next - exit - - - ARGC - ARGV - CONVFMT - ENVIRON - FILENAME - FNR - FS - NF - NR - OFMT - OFS - ORS - RS - RSTART - RLENGTH - SUBSEP - - - gsub - index - length - match - split - sprintf - sub - substr - tolower - toupper - atan2 - cos - exp - int - log - rand - sin - sqrt - srand - close - fflush - system - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - break - case - else - esac - exit - export - for - function - in - return - select - then - until - while - . 
- done - do - elif - fi - if - - - - cp - date - echo - eval - dcop - dcopstart - dcopfind - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - break - case - continue - default - do - else - enum - extern - for - goto - if - inline - return - sizeof - struct - switch - typedef - union - while - - - auto - char - const - double - float - int - long - register - restrict - short - signed - static - unsigned - void - volatile - _Imaginary - _Complex - _Bool - - - FIXME - TODO - ### - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - aaa - access-list - address - alias - arp - async-bootp - banner - boot - bridge - buffers - busy-message - call-history-mib - cdp - chat-script - class-map - clock - cns - config-register - controller - crypto - default - default-value - dialer - dialer-list - dnsix-dmdp - dnsix-nat - downward-compatible-config - enable - end - exception - exit - file - frame-relay - help - hostname - interface - ip - isdn - isdn-mib - kerberos - key - line - logging - login-string - map-class - map-list - memory-size - menu - modemcap - multilink - netbios - no - ntp - partition - policy-map - priority-list - privilege - process-max-time - prompt - queue-list - resume-string - rlogin - rmon - route-map - router - rtr - scheduler - service - snmp-server - sntp - stackmaker - state-machine - subscriber-policy - tacacs-server - template - terminal-queue - tftp-server - time-range - username - virtual-profile - virtual-template - vpdn - vpdn-group - x25 - x29 - - - accounting - accounting-list - accounting-threshold - accounting-transits - address-pool - as-path - audit - auth-proxy - authentication - authorization - bgp-community - bootp - cef - classless - community-list - default-gateway - default-network - dhcp - dhcp-server - domain-list - domain-lookup - domain-name - dvmrp - exec-callback - extcommunity-list - finger - flow-aggregation - flow-cache - flow-export - forward-protocol - ftp - gratuitous-arps - host - host-routing - hp-host - http - icmp - inspect - local - mrm - mroute - msdp - multicast - multicast-routing - name-server - nat - new-model - ospf - password - password-encryption - pgm - pim - port-map - prefix-list - radius - rcmd - reflexive-list - route - routing - rsvp - rtcp - sap - sdr - security - source-route - subnet-zero - tacacs - tcp - tcp-small-servers - telnet - tftp - timestamps - udp-small-servers - vrf - wccp - - - accounting - accounting-list - accounting-threshold - accounting-transits - address-pool - as-path - audit - auth-proxy - authentication - authorization - bgp-community - bootp - cef - classless - community-list - default-gateway - default-network - dhcp - dhcp-server - domain-list - domain-lookup - domain-name - dvmrp - exec-callback - extcommunity-list - finger - flow-aggregation - flow-cache - flow-export - forward-protocol - ftp - gratuitous-arps - host - host-routing - hp-host - http - icmp - inspect - local - mrm - mroute - msdp - multicast - multicast-routing - name-server - nat - new-model - ospf - password - password-encryption - pgm - pim - port-map - prefix-list - radius - rcmd - reflexive-list - route - routing - rsvp - rtcp - sap 
- sdr - security - source-route - subnet-zero - tacacs - tcp - tcp-small-servers - telnet - tftp - timestamps - udp-small-servers - vrf - wccp - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if - else - for - in - while - do - continue - break - with - try - catch - switch - case - new - var - function - return - this - delete - true - false - void - throw - typeof - const - default - - - - - - Anchor - Applet - Area - Array - Boolean - Button - Checkbox - Date - Document - Event - FileUpload - Form - Frame - Function - Hidden - History - Image - Layer - Linke - Location - Math - Navigator - Number - Object - Option - Password - Radio - RegExp - Reset - Screen - Select - String - Submit - Text - Textarea - Window - - - - - - abs - acos - alert - anchor - apply - asin - atan - atan2 - back - blur - call - captureEvents - ceil - charAt - charCodeAt - clearInterval - clearTimeout - click - close - compile - concat - confirm - cos - disableExternalCapture - enableExternalCapture - eval - exec - exp - find - floor - focus - forward - fromCharCode - getDate - getDay - getFullYear - getHours - getMilliseconds - getMinutes - getMonth - getSeconds - getSelection - getTime - getTimezoneOffset - getUTCDate - getUTCDay - getUTCFullYear - getUTCHours - getUTCMilliseconds - getUTCMinutes - getUTCMonth - getUTCSeconds - go - handleEvent - home - indexOf - javaEnabled - join - lastIndexOf - link - load - log - match - max - min - moveAbove - moveBelow - moveBy - moveTo - moveToAbsolute - open - parse - plugins.refresh - pop - pow - preference - print - prompt - push - random - releaseEvents - reload - replace - reset - resizeBy - resizeTo - reverse - round - routeEvent - scrollBy - scrollTo - search - select - setDate - setFullYear - setHours - setInterval - setMilliseconds - setMinutes - setMonth - setSeconds - setTime - setTimeout - setUTCDate - setUTCFullYear - setUTCHours - setUTCMilliseconds - setUTCMinutes - setUTCMonth - setUTCSeconds - shift - sin - slice - sort - splice - split - sqrt - stop - String formatting - submit - substr - substring - taintEnabled - tan - test - toLocaleString - toLowerCase - toSource - toString - toUpperCase - toUTCString - unshift - unwatch - UTC - valueOf - watch - write - writeln - - - - - - break - case - catch - continue - default - do - else - for - function - if - in - return - switch - try - var - while - - - - - - Abs - ACos - ArrayAppend - ArrayAvg - ArrayClear - ArrayDeleteAt - ArrayInsertAt - ArrayIsEmpty - ArrayLen - ArrayMax - ArrayMin - ArrayNew - ArrayPrepend - ArrayResize - ArraySet - ArraySort - ArraySum - ArraySwap - ArrayToList - Asc - ASin - Atn - BitAnd - BitMaskClear - BitMaskRead - BitMaskSet - BitNot - BitOr - BitSHLN - BitSHRN - BitXor - Ceiling - Chr - CJustify - Compare - CompareNoCase - Cos - CreateDate - CreateDateTime - CreateObject - CreateODBCDate - CreateODBCDateTime - CreateODBCTime - CreateTime - CreateTimeSpan - CreateUUID - DateAdd - DateCompare - DateConvert - DateDiff - DateFormat - DatePart - Day - DayOfWeek - DayOfWeekAsString - DayOfYear - DaysInMonth - DaysInYear - DE - DecimalFormat - DecrementValue - Decrypt - DeleteClientVariable - DirectoryExists - DollarFormat - Duplicate - Encrypt - Evaluate - Exp - ExpandPath - FileExists - Find - FindNoCase - FindOneOf - FirstDayOfMonth - Fix - FormatBaseN - GetAuthUser - GetBaseTagData - GetBaseTagList - GetBaseTemplatePath - GetClientVariablesList - GetCurrentTemplatePath - 
GetDirectoryFromPath - GetException - GetFileFromPath - GetFunctionList - GetHttpRequestData - GetHttpTimeString - GetK2ServerDocCount - GetK2ServerDocCountLimit - GetLocale - GetMetaData - GetMetricData - GetPageContext - GetProfileSections - GetProfileString - GetServiceSettings - GetTempDirectory - GetTempFile - GetTemplatePath - GetTickCount - GetTimeZoneInfo - GetToken - Hash - Hour - HTMLCodeFormat - HTMLEditFormat - IIf - IncrementValue - InputBaseN - Insert - Int - IsArray - IsBinary - IsBoolean - IsCustomFunction - IsDate - IsDebugMode - IsDefined - IsK2ServerABroker - IsK2ServerDocCountExceeded - IsK2ServerOnline - IsLeapYear - IsNumeric - IsNumericDate - IsObject - IsQuery - IsSimpleValue - IsStruct - IsUserInRole - IsWDDX - IsXmlDoc - IsXmlElement - IsXmlRoot - JavaCast - JSStringFormat - LCase - Left - Len - ListAppend - ListChangeDelims - ListContains - ListContainsNoCase - ListDeleteAt - ListFind - ListFindNoCase - ListFirst - ListGetAt - ListInsertAt - ListLast - ListLen - ListPrepend - ListQualify - ListRest - ListSetAt - ListSort - ListToArray - ListValueCount - ListValueCountNoCase - LJustify - Log - Log10 - LSCurrencyFormat - LSDateFormat - LSEuroCurrencyFormat - LSIsCurrency - LSIsDate - LSIsNumeric - LSNumberFormat - LSParseCurrency - LSParseDateTime - LSParseEuroCurrency - LSParseNumber - LSTimeFormat - LTrim - Max - Mid - Min - Minute - Month - MonthAsString - Now - NumberFormat - ParagraphFormat - ParameterExists - ParseDateTime - Pi - PreserveSingleQuotes - Quarter - QueryAddColumn - QueryAddRow - QueryNew - QuerySetCell - QuotedValueList - Rand - Randomize - RandRange - REFind - REFindNoCase - RemoveChars - RepeatString - Replace - ReplaceList - ReplaceNoCase - REReplace - REReplaceNoCase - Reverse - Right - RJustify - Round - RTrim - Second - SetEncoding - SetLocale - SetProfileString - SetVariable - Sgn - Sin - SpanExcluding - SpanIncluding - Sqr - StripCR - StructAppend - StructClear - StructCopy - StructCount - StructDelete - StructFind - StructFindKey - StructFindValue - StructGet - StructInsert - StructIsEmpty - StructKeyArray - StructKeyExists - StructKeyList - StructNew - StructSort - StructUpdate - Tan - TimeFormat - ToBase64 - ToBinary - ToString - Trim - UCase - URLDecode - URLEncodedFormat - URLSessionFormat - Val - ValueList - Week - WriteOutput - XmlChildPos - XmlElemNew - XmlFormat - XmlNew - XmlParse - XmlSearch - XmlTransform - Year - YesNoFormat - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - BEGIN - BY - CASE - CLOSE - CONST - DO - ELSE - ELSIF - END - FOR - IF - IMPORT - LOOP - MODULE - NEW - OF - OUT - PROCEDURE - REPEAT - THEN - TO - TYPE - UNTIL - VAR - WHILE - WITH - - - ASSERT - EXIT - HALT - RETURN - - - ANYPTR - ANYREC - ARRAY - BOOLEAN - SHORTCHAR - CHAR - BYTE - SHORTINT - INTEGER - LONGINT - POINTER - RECORD - SHORTREAL - REAL - SET - - - ABSTRACT - EMPTY - EXTENSIBLE - LIMITED - - - ABS - ASH - BITS - CAP - CHR - DEC - ENTIER - EXCL - INC - INCL - LEN - LONG - MAX - MIN - ODD - ORD - SHORT - SIZE - - - FALSE - INF - NIL - TRUE - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/examplefiles/xorg.conf b/tests/examplefiles/xorg.conf deleted file mode 100644 index e1f7164b..00000000 --- a/tests/examplefiles/xorg.conf +++ /dev/null @@ -1,48 +0,0 @@ -Section "Files" - ModulePath "/usr/lib64/opengl/nvidia/extensions" - ModulePath "/usr/lib64/xorg/modules" -EndSection - -Section "ServerLayout" - Identifier "XFree86 Configured" - Screen "Screen" -EndSection - -Section "ServerFlags" - Option "AutoAddDevices" "false" -EndSection - -Section "Screen" - Identifier "Screen" - Device "Card0" - DefaultDepth 24 - SubSection "Display" - Depth 24 - EndSubSection - Option "UseEDIDDpi" "False" - Option "DPI" "96 x 96" -EndSection - -Section "Device" - Identifier "Card0" - Driver "nvidia" - VendorName "NVIDIA Corporation" # inline comment - #Option "RenderAccel" "true" - - #Option "NvAgp" "3" - #Option "AllowGLXWithComposite" "true" - #Option "AddARGBGLXVisuals" "true" - #Option "XAANoOffscreenPixmaps" "true" - #Option "DRI" "true" - - #Option "UseEvents" "false" - #Option "TripleBuffer" "1" - #Option "DamageEvents" "1" - ##Option "BackingStore" "1" - #Option "PixmapCacheSize" "70000" - #Option "OnDemandVBlankInterrupts" "true" -EndSection - -Section "Extensions" -# Option "Composite" "Disabled" -EndSection diff --git a/tests/examplefiles/yahalom.cpsa b/tests/examplefiles/yahalom.cpsa deleted file mode 100644 index 3bc918d4..00000000 --- a/tests/examplefiles/yahalom.cpsa +++ /dev/null @@ -1,34 +0,0 @@ -(herald "Yahalom Protocol with Forwarding Removed") - -(defprotocol yahalom basic - (defrole init - (vars (a b c name) (n-a n-b text) (k skey)) - (trace (send (cat a n-a)) - (recv (enc b k n-a n-b (ltk a c))) - (send (enc n-b k)))) - (defrole resp - (vars (b a c name) (n-a n-b text) (k skey)) - (trace (recv (cat a n-a)) - (send (cat b (enc a n-a n-b (ltk b c)))) - (recv (enc a k (ltk b c))) - (recv (enc n-b k)))) - (defrole serv - (vars (c a b name) (n-a n-b text) (k skey)) - (trace (recv (cat b (enc a n-a n-b (ltk b c)))) - (send (enc b k n-a n-b (ltk a c))) - (send (enc a k (ltk b c)))) - (uniq-orig k))) - -(defskeleton yahalom - (vars (a b c name) (n-b text)) - (defstrand resp 4 (a a) (b b) (c c) (n-b n-b)) - (non-orig (ltk b c) (ltk a c)) - (uniq-orig n-b)) - -;;; Ensure encryption key remains secret. -(defskeleton yahalom - (vars (a b c name) (n-b text) (k skey)) - (defstrand resp 4 (a a) (b b) (c c) (n-b n-b) (k k)) - (deflistener k) - (non-orig (ltk b c) (ltk a c)) - (uniq-orig n-b)) diff --git a/tests/examplefiles/zmlrpc.f90 b/tests/examplefiles/zmlrpc.f90 deleted file mode 100644 index 441497b3..00000000 --- a/tests/examplefiles/zmlrpc.f90 +++ /dev/null @@ -1,798 +0,0 @@ -!!$ -!!$ -!!$ MD2P4 -!!$ Multilevel Domain Decomposition Parallel Preconditioner Package for PSBLAS -!!$ for -!!$ Parallel Sparse BLAS v2.0 -!!$ (C) Copyright 2006 Salvatore Filippone University of Rome Tor Vergata -!!$ Alfredo Buttari University of Rome Tor Vergata -!!$ Daniela Di Serafino II University of Naples -!!$ Pasqua D'Ambra ICAR-CNR -!!$ -!!$ Redistribution and use in source and binary forms, with or without -!!$ modification, are permitted provided that the following conditions -!!$ are met: -!!$ 1. Redistributions of source code must retain the above copyright -!!$ notice, this list of conditions and the following disclaimer. -!!$ 2. 
Redistributions in binary form must reproduce the above copyright -!!$ notice, this list of conditions, and the following disclaimer in the -!!$ documentation and/or other materials provided with the distribution. -!!$ 3. The name of the MD2P4 group or the names of its contributors may -!!$ not be used to endorse or promote products derived from this -!!$ software without specific written permission. -!!$ -!!$ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -!!$ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -!!$ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -!!$ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE MD2P4 GROUP OR ITS CONTRIBUTORS -!!$ BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -!!$ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -!!$ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -!!$ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -!!$ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -!!$ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -!!$ POSSIBILITY OF SUCH DAMAGE. -!!$ -!!$ -subroutine psb_zmlprc_aply(alpha,baseprecv,x,beta,y,desc_data,trans,work,info) - ! - ! Compute Y <- beta*Y + alpha*K^-1 X - ! where K is a multilevel preconditioner stored in baseprecv - ! - ! cfr.: Smith, Biorstad & Gropp - ! Domain Decomposition - ! Cambridge Univ. Press - ! - ! To each level I there corresponds a matrix A(I) and a preconditioner K(I) - ! - ! A notational difference: in the DD reference above the preconditioner for - ! a given level K(I) is written out as a sum over the subdomains - ! - ! SUM_k(R_k^T A_k R_k) - ! - ! whereas in this code the sum is implicit in the parallelization, - ! i.e. each process takes care of one subdomain, and for each level we have - ! as many subdomains as there are processes (except for the coarsest level where - ! we might have a replicated index space). Thus the sum apparently disappears - ! from our code, but only apparently, because it is implicit in the call - ! to psb_baseprc_aply. - ! - ! A bit of description of the baseprecv(:) data structure: - ! 1. Number of levels = NLEV = size(baseprecv(:)) - ! 2. baseprecv(ilev)%av(:) sparse matrices needed for the current level. - ! Includes: - ! 2.1.: baseprecv(ilev)%av(l_pr_) L factor of ILU preconditioners - ! 2.2.: baseprecv(ilev)%av(u_pr_) U factor of ILU preconditioners - ! 2.3.: baseprecv(ilev)%av(ap_nd_) Off-diagonal part of A for Jacobi sweeps - ! 2.4.: baseprecv(ilev)%av(ac_) Aggregated matrix of level ILEV - ! 2.5.: baseprecv(ilev)%av(sm_pr_t_) Smoother prolongator transpose; maps vectors - ! (ilev-1) ---> (ilev) - ! 2.6.: baseprecv(ilev)%av(sm_pr_) Smoother prolongator; maps vectors - ! (ilev) ---> (ilev-1) - ! Shouldn't we keep just one of them and handle transpose in the sparse BLAS? maybe - ! - ! 3. baseprecv(ilev)%desc_data comm descriptor for level ILEV - ! 4. baseprecv(ilev)%base_a Pointer (really a pointer!) to the base matrix - ! of the current level, i.e.: if ILEV=1 then A - ! else the aggregated matrix av(ac_); so we have - ! a unified treatment of residuals. Need this to - ! avoid passing explicitly matrix A to the - ! outer prec. routine - ! 5. baseprecv(ilev)%mlia The aggregation map from (ilev-1)-->(ilev) - ! if no smoother, it is used instead of sm_pr_ - ! 6. baseprecv(ilev)%nlaggr Number of aggregates on the various procs. - ! 
- - use psb_serial_mod - use psb_descriptor_type - use psb_prec_type - use psb_psblas_mod - use psb_penv_mod - use psb_const_mod - use psb_error_mod - use psb_penv_mod - implicit none - - type(psb_desc_type),intent(in) :: desc_data - type(psb_zbaseprc_type), intent(in) :: baseprecv(:) - complex(kind(1.d0)),intent(in) :: alpha,beta - complex(kind(1.d0)),intent(inout) :: x(:), y(:) - character :: trans - complex(kind(1.d0)),target :: work(:) - integer, intent(out) :: info - - - ! Local variables - integer :: n_row,n_col - complex(kind(1.d0)), allocatable :: tx(:),ty(:),t2l(:),w2l(:),& - & x2l(:),b2l(:),tz(:),tty(:) - character ::diagl, diagu - integer :: ictxt,np,me,i, isz, nrg,nr2l,err_act, iptype, int_err(5) - real(kind(1.d0)) :: omega - real(kind(1.d0)) :: t1, t2, t3, t4, t5, t6, t7, mpi_wtime - logical, parameter :: debug=.false., debugprt=.false. - integer :: ismth, nlev, ilev - external mpi_wtime - character(len=20) :: name, ch_err - - type psb_mlprec_wrk_type - complex(kind(1.d0)), pointer :: tx(:)=>null(),ty(:)=>null(),& - & x2l(:)=>null(),y2l(:)=>null(),& - & b2l(:)=>null(),tty(:)=>null() - end type psb_mlprec_wrk_type - type(psb_mlprec_wrk_type), pointer :: mlprec_wrk(:) - - interface psb_baseprc_aply - subroutine psb_zbaseprc_aply(alpha,prec,x,beta,y,desc_data,trans,work,info) - use psb_descriptor_type - use psb_prec_type - type(psb_desc_type),intent(in) :: desc_data - type(psb_zbaseprc_type), intent(in) :: prec - complex(kind(1.d0)),intent(inout) :: x(:), y(:) - complex(kind(1.d0)),intent(in) :: alpha,beta - character(len=1) :: trans - complex(kind(1.d0)),target :: work(:) - integer, intent(out) :: info - end subroutine psb_zbaseprc_aply - end interface - - name='psb_mlprc_aply' - info = 0 - call psb_erractionsave(err_act) - - - ictxt=desc_data%matrix_data(psb_ctxt_) - call psb_info(ictxt, me, np) - - nlev = size(baseprecv) - allocate(mlprec_wrk(nlev),stat=info) - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - - select case(baseprecv(2)%iprcparm(ml_type_)) - - case(no_ml_) - ! Should not really get here. - call psb_errpush(4010,name,a_err='no_ml_ in mlprc_aply?') - goto 9999 - - - case(add_ml_prec_) - - - ! - ! Additive is very simple. - ! 1. X(1) = Xext - ! 2. DO ILEV=2,NLEV - ! X(ILEV) = AV(PR_SM_T_)*X(ILEV-1) - ! 3. Y(ILEV) = (K(ILEV)**(-1))*X(ILEV) - ! 4. DO ILEV=NLEV-1,1,-1 - ! Y(ILEV) = AV(PR_SM_)*Y(ILEV+1) - ! 5. Yext = beta*Yext + Y(1) - ! - ! Note: level numbering reversed wrt ref. DD, i.e. - ! 
1..NLEV <=> (j) <-> 0 - - - call psb_baseprc_aply(alpha,baseprecv(1),x,beta,y,& - & baseprecv(1)%base_desc,trans,work,info) - if(info /=0) goto 9999 - allocate(mlprec_wrk(1)%x2l(size(x)),mlprec_wrk(1)%y2l(size(y))) - mlprec_wrk(1)%x2l(:) = x(:) - - - do ilev = 2, nlev - n_row = baseprecv(ilev-1)%base_desc%matrix_data(psb_n_row_) - n_col = baseprecv(ilev-1)%desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(ilev)%desc_data%matrix_data(psb_n_col_) - nrg = baseprecv(ilev)%desc_data%matrix_data(psb_n_row_) - allocate(mlprec_wrk(ilev)%x2l(nr2l),mlprec_wrk(ilev)%y2l(nr2l),& - & mlprec_wrk(ilev)%tx(max(n_row,n_col)),& - & mlprec_wrk(ilev)%ty(max(n_row,n_col)), stat=info) - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - mlprec_wrk(ilev)%x2l(:) = zzero - mlprec_wrk(ilev)%y2l(:) = zzero - mlprec_wrk(ilev)%tx(1:n_row) = mlprec_wrk(ilev-1)%x2l(1:n_row) - mlprec_wrk(ilev)%tx(n_row+1:max(n_row,n_col)) = zzero - mlprec_wrk(ilev)%ty(:) = zzero - - ismth=baseprecv(ilev)%iprcparm(smth_kind_) - - if (ismth /= no_smth_) then - ! - ! Smoothed aggregation - ! - - - if (baseprecv(ilev)%iprcparm(glb_smth_) >0) then - call psb_halo(mlprec_wrk(ilev-1)%x2l,baseprecv(ilev-1)%base_desc,& - & info,work=work) - if(info /=0) goto 9999 - else - mlprec_wrk(ilev-1)%x2l(n_row+1:max(n_row,n_col)) = zzero - end if - - call psb_csmm(zone,baseprecv(ilev)%av(sm_pr_t_),mlprec_wrk(ilev-1)%x2l,& - & zzero,mlprec_wrk(ilev)%x2l,info) - if(info /=0) goto 9999 - - else - ! - ! Raw aggregation, may take shortcut - ! - do i=1,n_row - mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) = & - & mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) + & - & mlprec_wrk(ilev-1)%x2l(i) - end do - - end if - - if (baseprecv(ilev)%iprcparm(coarse_mat_)==mat_repl_) Then - call psb_sum(ictxt,mlprec_wrk(ilev)%x2l(1:nrg)) - else if (baseprecv(ilev)%iprcparm(coarse_mat_) /= mat_distr_) Then - write(0,*) 'Unknown value for baseprecv(2)%iprcparm(coarse_mat_) ',& - & baseprecv(ilev)%iprcparm(coarse_mat_) - endif - - call psb_baseprc_aply(zone,baseprecv(ilev),& - & mlprec_wrk(ilev)%x2l,zzero,mlprec_wrk(ilev)%y2l,& - & baseprecv(ilev)%desc_data, 'N',work,info) - - enddo - - do ilev =nlev,2,-1 - - ismth=baseprecv(ilev)%iprcparm(smth_kind_) - n_row = baseprecv(ilev-1)%base_desc%matrix_data(psb_n_row_) - n_col = baseprecv(ilev-1)%desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(ilev)%desc_data%matrix_data(psb_n_col_) - nrg = baseprecv(ilev)%desc_data%matrix_data(psb_n_row_) - - if (ismth /= no_smth_) then - - call psb_csmm(zone,baseprecv(ilev)%av(sm_pr_),mlprec_wrk(ilev)%y2l,& - & zone,mlprec_wrk(ilev-1)%y2l,info) - if(info /=0) goto 9999 - - else - - do i=1, n_row - mlprec_wrk(ilev-1)%y2l(i) = mlprec_wrk(ilev-1)%y2l(i) + & - & mlprec_wrk(ilev)%y2l(baseprecv(ilev)%mlia(i)) - enddo - - end if - end do - - call psb_geaxpby(alpha,mlprec_wrk(1)%y2l,zone,y,baseprecv(1)%base_desc,info) - if(info /=0) goto 9999 - - - case(mult_ml_prec_) - - ! - ! Multiplicative multilevel - ! Pre/post smoothing versions. - ! - - select case(baseprecv(2)%iprcparm(smth_pos_)) - - case(post_smooth_) - - - ! - ! Post smoothing. - ! 1. X(1) = Xext - ! 2. DO ILEV=2, NLEV :: X(ILEV) = AV(PR_SM_T_,ILEV)*X(ILEV-1) - ! 3. Y(NLEV) = (K(NLEV)**(-1))*X(NLEV) - ! 4. DO ILEV=NLEV-1,1,-1 - ! Y(ILEV) = AV(PR_SM_,ILEV+1)*Y(ILEV+1) - ! Y(ILEV) = Y(ILEV) + (K(ILEV)**(-1))*(X(ILEV)-A(ILEV)*Y(ILEV)) - ! - ! 5. Yext = beta*Yext + Y(1) - ! - ! Note: level numbering reversed wrt ref. DD, i.e. - ! 1..NLEV <=> (j) <-> 0 - ! - ! 
Also: post smoothing is not spelled out in detail in DD. - ! - ! - - - n_col = desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(1)%desc_data%matrix_data(psb_n_col_) - - allocate(mlprec_wrk(1)%x2l(nr2l),mlprec_wrk(1)%y2l(nr2l), & - & mlprec_wrk(1)%tx(nr2l), stat=info) - mlprec_wrk(1)%x2l(:) = zzero - mlprec_wrk(1)%y2l(:) = zzero - mlprec_wrk(1)%tx(:) = zzero - - call psb_geaxpby(zone,x,zzero,mlprec_wrk(1)%tx,& - & baseprecv(1)%base_desc,info) - call psb_geaxpby(zone,x,zzero,mlprec_wrk(1)%x2l,& - & baseprecv(1)%base_desc,info) - - do ilev=2, nlev - n_row = baseprecv(ilev-1)%base_desc%matrix_data(psb_n_row_) - n_col = baseprecv(ilev-1)%desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(ilev)%desc_data%matrix_data(psb_n_col_) - nrg = baseprecv(ilev)%desc_data%matrix_data(psb_n_row_) - ismth = baseprecv(ilev)%iprcparm(smth_kind_) - - allocate(mlprec_wrk(ilev)%tx(nr2l),mlprec_wrk(ilev)%y2l(nr2l),& - & mlprec_wrk(ilev)%x2l(nr2l), stat=info) - - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - mlprec_wrk(ilev)%x2l(:) = zzero - mlprec_wrk(ilev)%y2l(:) = zzero - mlprec_wrk(ilev)%tx(:) = zzero - if (ismth /= no_smth_) then - ! - ! Smoothed aggregation - ! - if (baseprecv(ilev)%iprcparm(glb_smth_) >0) then - call psb_halo(mlprec_wrk(ilev-1)%x2l,& - & baseprecv(ilev-1)%base_desc,info,work=work) - if(info /=0) goto 9999 - else - mlprec_wrk(ilev-1)%x2l(n_row+1:max(n_row,n_col)) = zzero - end if - - call psb_csmm(zone,baseprecv(ilev)%av(sm_pr_t_),mlprec_wrk(ilev-1)%x2l, & - & zzero,mlprec_wrk(ilev)%x2l,info) - if(info /=0) goto 9999 - - else - ! - ! Raw aggregation, may take shortcut - ! - do i=1,n_row - mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) = & - & mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) + & - & mlprec_wrk(ilev-1)%x2l(i) - end do - end if - - if (baseprecv(ilev)%iprcparm(coarse_mat_)==mat_repl_) Then - call psb_sum(ictxt,mlprec_wrk(ilev)%x2l(1:nrg)) - else if (baseprecv(ilev)%iprcparm(coarse_mat_) /= mat_distr_) Then - write(0,*) 'Unknown value for baseprecv(2)%iprcparm(coarse_mat_) ',& - & baseprecv(ilev)%iprcparm(coarse_mat_) - endif - call psb_geaxpby(zone,mlprec_wrk(ilev)%x2l,zzero,mlprec_wrk(ilev)%tx,& - & baseprecv(ilev)%base_desc,info) - if(info /=0) goto 9999 - - enddo - - - call psb_baseprc_aply(zone,baseprecv(nlev),mlprec_wrk(nlev)%x2l, & - & zzero, mlprec_wrk(nlev)%y2l,baseprecv(nlev)%desc_data,'N',work,info) - - if(info /=0) goto 9999 - - - do ilev=nlev-1, 1, -1 - ismth = baseprecv(ilev+1)%iprcparm(smth_kind_) - if (ismth /= no_smth_) then - if (ismth == smth_omg_) & - & call psb_halo(mlprec_wrk(ilev+1)%y2l,baseprecv(ilev+1)%desc_data,& - & info,work=work) - call psb_csmm(zone,baseprecv(ilev+1)%av(sm_pr_),mlprec_wrk(ilev+1)%y2l,& - & zzero,mlprec_wrk(ilev)%y2l,info) - if(info /=0) goto 9999 - - else - n_row = baseprecv(ilev)%base_desc%matrix_data(psb_n_row_) - mlprec_wrk(ilev)%y2l(:) = zzero - do i=1, n_row - mlprec_wrk(ilev)%y2l(i) = mlprec_wrk(ilev)%y2l(i) + & - & mlprec_wrk(ilev+1)%y2l(baseprecv(ilev+1)%mlia(i)) - enddo - - end if - - call psb_spmm(-zone,baseprecv(ilev)%base_a,mlprec_wrk(ilev)%y2l,& - & zone,mlprec_wrk(ilev)%tx,baseprecv(ilev)%base_desc,info,work=work) - - if(info /=0) goto 9999 - - call psb_baseprc_aply(zone,baseprecv(ilev),mlprec_wrk(ilev)%tx,& - & zone,mlprec_wrk(ilev)%y2l,baseprecv(ilev)%base_desc, trans, work,info) - - if(info /=0) goto 9999 - - enddo - - call psb_geaxpby(alpha,mlprec_wrk(1)%y2l,beta,y,baseprecv(1)%base_desc,info) - - if(info /=0) goto 9999 - - - case(pre_smooth_) - - - ! - ! 
Pre smoothing. - ! 1. X(1) = Xext - ! 2. Y(1) = (K(1)**(-1))*X(1) - ! 3. TX(1) = X(1) - A(1)*Y(1) - ! 4. DO ILEV=2, NLEV - ! X(ILEV) = AV(PR_SM_T_,ILEV)*TX(ILEV-1) - ! Y(ILEV) = (K(ILEV)**(-1))*X(ILEV) - ! TX(ILEV) = (X(ILEV)-A(ILEV)*Y(ILEV)) - ! 5. DO ILEV=NLEV-1,1,-1 - ! Y(ILEV) = Y(ILEV) + AV(PR_SM_,ILEV+1)*Y(ILEV+1) - ! 6. Yext = beta*Yext + Y(1) - ! - ! Note: level numbering reversed wrt ref. DD, i.e. - ! 1..NLEV <=> (j) <-> 0 - ! - ! - - n_col = desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(1)%desc_data%matrix_data(psb_n_col_) - - allocate(mlprec_wrk(1)%x2l(nr2l),mlprec_wrk(1)%y2l(nr2l), & - & mlprec_wrk(1)%tx(nr2l), stat=info) - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - mlprec_wrk(1)%y2l(:) = zzero - - - mlprec_wrk(1)%x2l(:) = x - - call psb_baseprc_aply(zone,baseprecv(1),mlprec_wrk(1)%x2l,& - & zzero,mlprec_wrk(1)%y2l,& - & baseprecv(1)%base_desc,& - & trans,work,info) - - if(info /=0) goto 9999 - - mlprec_wrk(1)%tx = mlprec_wrk(1)%x2l - - call psb_spmm(-zone,baseprecv(1)%base_a,mlprec_wrk(1)%y2l,& - & zone,mlprec_wrk(1)%tx,baseprecv(1)%base_desc,info,work=work) - if(info /=0) goto 9999 - - do ilev = 2, nlev - n_row = baseprecv(ilev-1)%base_desc%matrix_data(psb_n_row_) - n_col = baseprecv(ilev-1)%desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(ilev)%desc_data%matrix_data(psb_n_col_) - nrg = baseprecv(ilev)%desc_data%matrix_data(psb_n_row_) - ismth = baseprecv(ilev)%iprcparm(smth_kind_) - allocate(mlprec_wrk(ilev)%tx(nr2l),mlprec_wrk(ilev)%y2l(nr2l),& - & mlprec_wrk(ilev)%x2l(nr2l), stat=info) - - - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - mlprec_wrk(ilev)%x2l(:) = zzero - mlprec_wrk(ilev)%y2l(:) = zzero - mlprec_wrk(ilev)%tx(:) = zzero - - - if (ismth /= no_smth_) then - ! - !Smoothed Aggregation - ! - if (baseprecv(ilev)%iprcparm(glb_smth_) >0) then - - call psb_halo(mlprec_wrk(ilev-1)%tx,baseprecv(ilev-1)%base_desc,& - & info,work=work) - if(info /=0) goto 9999 - else - mlprec_wrk(ilev-1)%tx(n_row+1:max(n_row,n_col)) = zzero - end if - - call psb_csmm(zone,baseprecv(ilev)%av(sm_pr_t_),mlprec_wrk(ilev-1)%tx,zzero,& - & mlprec_wrk(ilev)%x2l,info) - if(info /=0) goto 9999 - - else - ! - ! Raw aggregation, may take shortcuts - ! 
- mlprec_wrk(ilev)%x2l = zzero - do i=1,n_row - mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) = & - & mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) + & - & mlprec_wrk(ilev-1)%tx(i) - end do - end if - - if (baseprecv(ilev)%iprcparm(coarse_mat_)==mat_repl_) then - call psb_sum(ictxt,mlprec_wrk(ilev)%x2l(1:nrg)) - else if (baseprecv(ilev)%iprcparm(coarse_mat_) /= mat_distr_) then - write(0,*) 'Unknown value for baseprecv(2)%iprcparm(coarse_mat_) ',& - & baseprecv(ilev)%iprcparm(coarse_mat_) - endif - - - call psb_baseprc_aply(zone,baseprecv(ilev),mlprec_wrk(ilev)%x2l,& - & zzero,mlprec_wrk(ilev)%y2l,baseprecv(ilev)%desc_data, 'N',work,info) - - if(info /=0) goto 9999 - - if(ilev < nlev) then - mlprec_wrk(ilev)%tx = mlprec_wrk(ilev)%x2l - call psb_spmm(-zone,baseprecv(ilev)%base_a,mlprec_wrk(ilev)%y2l,& - & zone,mlprec_wrk(ilev)%tx,baseprecv(ilev)%base_desc,info,work=work) - if(info /=0) goto 9999 - endif - - enddo - - do ilev = nlev-1, 1, -1 - - ismth=baseprecv(ilev+1)%iprcparm(smth_kind_) - - if (ismth /= no_smth_) then - - if (ismth == smth_omg_) & - & call psb_halo(mlprec_wrk(ilev+1)%y2l,& - & baseprecv(ilev+1)%desc_data,info,work=work) - call psb_csmm(zone,baseprecv(ilev+1)%av(sm_pr_),mlprec_wrk(ilev+1)%y2l,& - & zone,mlprec_wrk(ilev)%y2l,info) - - if(info /=0) goto 9999 - - else - - n_row = baseprecv(ilev+1)%base_desc%matrix_data(psb_n_row_) - do i=1, n_row - mlprec_wrk(ilev)%y2l(i) = mlprec_wrk(ilev)%y2l(i) + & - & mlprec_wrk(ilev+1)%y2l(baseprecv(ilev+1)%mlia(i)) - enddo - - end if - - enddo - - call psb_geaxpby(alpha,mlprec_wrk(1)%y2l,beta,y,& - & baseprecv(1)%base_desc,info) - - if(info /=0) goto 9999 - - - - case(smooth_both_) - - ! - ! Symmetrized smoothing. - ! 1. X(1) = Xext - ! 2. Y(1) = (K(1)**(-1))*X(1) - ! 3. TX(1) = X(1) - A(1)*Y(1) - ! 4. DO ILEV=2, NLEV - ! X(ILEV) = AV(PR_SM_T_,ILEV)*TX(ILEV-1) - ! Y(ILEV) = (K(ILEV)**(-1))*X(ILEV) - ! TX(ILEV) = (X(ILEV)-A(ILEV)*Y(ILEV)) - ! 5. DO ILEV=NLEV-1,1,-1 - ! Y(ILEV) = Y(ILEV) + AV(PR_SM_,ILEV+1)*Y(ILEV+1) - ! Y(ILEV) = Y(ILEV) + (K(ILEV)**(-1))*(X(ILEV)-A(ILEV)*Y(ILEV)) - ! 6. Yext = beta*Yext + Y(1) - ! - ! Note: level numbering reversed wrt ref. DD, i.e. - ! 1..NLEV <=> (j) <-> 0 - ! - ! 
- n_col = desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(1)%desc_data%matrix_data(psb_n_col_) - - allocate(mlprec_wrk(1)%x2l(nr2l),mlprec_wrk(1)%y2l(nr2l), & - & mlprec_wrk(1)%ty(nr2l), mlprec_wrk(1)%tx(nr2l), stat=info) - - mlprec_wrk(1)%x2l(:) = zzero - mlprec_wrk(1)%y2l(:) = zzero - mlprec_wrk(1)%tx(:) = zzero - mlprec_wrk(1)%ty(:) = zzero - - - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - call psb_geaxpby(zone,x,zzero,mlprec_wrk(1)%x2l,& - & baseprecv(1)%base_desc,info) - call psb_geaxpby(zone,x,zzero,mlprec_wrk(1)%tx,& - & baseprecv(1)%base_desc,info) - - call psb_baseprc_aply(zone,baseprecv(1),mlprec_wrk(1)%x2l,& - & zzero,mlprec_wrk(1)%y2l,& - & baseprecv(1)%base_desc,& - & trans,work,info) - - if(info /=0) goto 9999 - - mlprec_wrk(1)%ty = mlprec_wrk(1)%x2l - - call psb_spmm(-zone,baseprecv(1)%base_a,mlprec_wrk(1)%y2l,& - & zone,mlprec_wrk(1)%ty,baseprecv(1)%base_desc,info,work=work) - if(info /=0) goto 9999 - - do ilev = 2, nlev - n_row = baseprecv(ilev-1)%base_desc%matrix_data(psb_n_row_) - n_col = baseprecv(ilev-1)%desc_data%matrix_data(psb_n_col_) - nr2l = baseprecv(ilev)%desc_data%matrix_data(psb_n_col_) - nrg = baseprecv(ilev)%desc_data%matrix_data(psb_n_row_) - ismth=baseprecv(ilev)%iprcparm(smth_kind_) - allocate(mlprec_wrk(ilev)%ty(nr2l),mlprec_wrk(ilev)%y2l(nr2l),& - & mlprec_wrk(ilev)%x2l(nr2l), stat=info) - - mlprec_wrk(ilev)%x2l(:) = zzero - mlprec_wrk(ilev)%y2l(:) = zzero - mlprec_wrk(ilev)%tx(:) = zzero - mlprec_wrk(ilev)%ty(:) = zzero - - - if (info /= 0) then - call psb_errpush(4010,name,a_err='Allocate') - goto 9999 - end if - - - if (ismth /= no_smth_) then - ! - !Smoothed Aggregation - ! - if (baseprecv(ilev)%iprcparm(glb_smth_) >0) then - - call psb_halo(mlprec_wrk(ilev-1)%ty,baseprecv(ilev-1)%base_desc,& - & info,work=work) - if(info /=0) goto 9999 - else - mlprec_wrk(ilev-1)%ty(n_row+1:max(n_row,n_col)) = zzero - end if - - call psb_csmm(zone,baseprecv(ilev)%av(sm_pr_t_),mlprec_wrk(ilev-1)%ty,zzero,& - & mlprec_wrk(ilev)%x2l,info) - if(info /=0) goto 9999 - - else - ! - ! Raw aggregation, may take shortcuts - ! 
- mlprec_wrk(ilev)%x2l = zzero - do i=1,n_row - mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) = & - & mlprec_wrk(ilev)%x2l(baseprecv(ilev)%mlia(i)) + & - & mlprec_wrk(ilev-1)%ty(i) - end do - end if - - if (baseprecv(ilev)%iprcparm(coarse_mat_)==mat_repl_) then - call psb_sum(ictxt,mlprec_wrk(ilev)%x2l(1:nrg)) - else if (baseprecv(ilev)%iprcparm(coarse_mat_) /= mat_distr_) then - write(0,*) 'Unknown value for baseprecv(2)%iprcparm(coarse_mat_) ',& - & baseprecv(ilev)%iprcparm(coarse_mat_) - endif - - call psb_geaxpby(zone,mlprec_wrk(ilev)%x2l,zzero,mlprec_wrk(ilev)%tx,& - & baseprecv(ilev)%base_desc,info) - if(info /=0) goto 9999 - - call psb_baseprc_aply(zone,baseprecv(ilev),mlprec_wrk(ilev)%x2l,& - & zzero,mlprec_wrk(ilev)%y2l,baseprecv(ilev)%desc_data, 'N',work,info) - - if(info /=0) goto 9999 - - if(ilev < nlev) then - mlprec_wrk(ilev)%ty = mlprec_wrk(ilev)%x2l - call psb_spmm(-zone,baseprecv(ilev)%base_a,mlprec_wrk(ilev)%y2l,& - & zone,mlprec_wrk(ilev)%ty,baseprecv(ilev)%base_desc,info,work=work) - if(info /=0) goto 9999 - endif - - enddo - - - do ilev=nlev-1, 1, -1 - - ismth=baseprecv(ilev+1)%iprcparm(smth_kind_) - if (ismth /= no_smth_) then - if (ismth == smth_omg_) & - & call psb_halo(mlprec_wrk(ilev+1)%y2l,baseprecv(ilev+1)%desc_data,& - & info,work=work) - call psb_csmm(zone,baseprecv(ilev+1)%av(sm_pr_),mlprec_wrk(ilev+1)%y2l,& - & zone,mlprec_wrk(ilev)%y2l,info) - if(info /=0) goto 9999 - - else - n_row = baseprecv(ilev)%base_desc%matrix_data(psb_n_row_) - do i=1, n_row - mlprec_wrk(ilev)%y2l(i) = mlprec_wrk(ilev)%y2l(i) + & - & mlprec_wrk(ilev+1)%y2l(baseprecv(ilev+1)%mlia(i)) - enddo - - end if - - call psb_spmm(-zone,baseprecv(ilev)%base_a,mlprec_wrk(ilev)%y2l,& - & zone,mlprec_wrk(ilev)%tx,baseprecv(ilev)%base_desc,info,work=work) - - if(info /=0) goto 9999 - - call psb_baseprc_aply(zone,baseprecv(ilev),mlprec_wrk(ilev)%tx,& - & zone,mlprec_wrk(ilev)%y2l,baseprecv(ilev)%base_desc, trans, work,info) - - if(info /=0) goto 9999 - - enddo - - call psb_geaxpby(alpha,mlprec_wrk(1)%y2l,beta,y,& - & baseprecv(1)%base_desc,info) - - if(info /=0) goto 9999 - - - case default - - call psb_errpush(4013,name,a_err='wrong smooth_pos',& - & i_Err=(/baseprecv(2)%iprcparm(smth_pos_),0,0,0,0/)) - goto 9999 - - end select - - case default - call psb_errpush(4013,name,a_err='wrong mltype',& - & i_Err=(/baseprecv(2)%iprcparm(ml_type_),0,0,0,0/)) - goto 9999 - - end select - - - call mlprec_wrk_free(mlprec_wrk) - deallocate(mlprec_wrk) - - call psb_erractionrestore(err_act) - return - -9999 continue - call psb_errpush(info,name) - call psb_erractionrestore(err_act) - if (err_act.eq.act_abort) then - call psb_error() - return - end if - return - -contains - subroutine mlprec_wrk_free(wrk) - type(psb_mlprec_wrk_type) :: wrk(:) - ! This will not be needed when we have allocatables, as - ! it is sufficient to deallocate the container, and - ! the compiler is supposed to recursively deallocate the - ! various components. 
- integer i - - do i=1, size(wrk) - if (associated(wrk(i)%tx)) deallocate(wrk(i)%tx) - if (associated(wrk(i)%ty)) deallocate(wrk(i)%ty) - if (associated(wrk(i)%x2l)) deallocate(wrk(i)%x2l) - if (associated(wrk(i)%y2l)) deallocate(wrk(i)%y2l) - if (associated(wrk(i)%b2l)) deallocate(wrk(i)%b2l) - if (associated(wrk(i)%tty)) deallocate(wrk(i)%tty) - end do - end subroutine mlprec_wrk_free - -end subroutine psb_zmlprc_aply - diff --git a/tests/run.py b/tests/run.py deleted file mode 100644 index edebc7a1..00000000 --- a/tests/run.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Pygments unit tests - ~~~~~~~~~~~~~~~~~~ - - Usage:: - - python run.py [testfile ...] - - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from __future__ import print_function - -import os -import sys -import warnings - -# only find tests in this directory -if os.path.dirname(__file__): - os.chdir(os.path.dirname(__file__)) - -# make FutureWarnings (coming from Regex syntax most likely) and -# DeprecationWarnings due to non-raw strings an error -warnings.filterwarnings("error", module=r"pygments\..*", - category=FutureWarning) -warnings.filterwarnings("error", module=r".*pygments.*", - category=DeprecationWarning) - - -try: - import nose -except ImportError: - print('nose is required to run the Pygments test suite') - sys.exit(1) - -# make sure the current source is first on sys.path -sys.path.insert(0, '..') - -if '--with-coverage' not in sys.argv: - # if running with coverage, pygments should not be imported before coverage - # is started, otherwise it will count already executed lines as uncovered - try: - import pygments - except ImportError as err: - print('Cannot find Pygments to test: %s' % err) - sys.exit(1) - else: - print('Pygments %s test suite running (Python %s)...' % - (pygments.__version__, sys.version.split()[0]), - file=sys.stderr) -else: - print('Pygments test suite running (Python %s)...' % sys.version.split()[0], - file=sys.stderr) - -nose.main() diff --git a/tests/string_asserts.py b/tests/string_asserts.py deleted file mode 100644 index a02c52bb..00000000 --- a/tests/string_asserts.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Pygments string assert utility - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -class StringTests(object): - - def assertStartsWith(self, haystack, needle, msg=None): - if msg is None: - msg = "'{0}' does not start with '{1}'".format(haystack, needle) - if not haystack.startswith(needle): - raise(AssertionError(msg)) - - def assertEndsWith(self, haystack, needle, msg=None): - if msg is None: - msg = "'{0}' does not end with '{1}'".format(haystack, needle) - if not haystack.endswith(needle): - raise(AssertionError(msg)) diff --git a/tests/support.py b/tests/support.py deleted file mode 100644 index c66ac663..00000000 --- a/tests/support.py +++ /dev/null @@ -1,17 +0,0 @@ -# coding: utf-8 -""" -Support for Pygments tests -""" - -import os - -from nose import SkipTest - - -def location(mod_name): - """ - Return the file and directory that the code for *mod_name* is in. 
- """ - source = mod_name.endswith("pyc") and mod_name[:-1] or mod_name - source = os.path.abspath(source) - return source, os.path.dirname(source) diff --git a/tests/support/empty.py b/tests/support/empty.py deleted file mode 100644 index 40a96afc..00000000 --- a/tests/support/empty.py +++ /dev/null @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/tests/support/html_formatter.py b/tests/support/html_formatter.py deleted file mode 100644 index 169cd4af..00000000 --- a/tests/support/html_formatter.py +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -from pygments.formatters import HtmlFormatter - - -class HtmlFormatterWrapper(HtmlFormatter): - name = 'HtmlWrapper' diff --git a/tests/support/python_lexer.py b/tests/support/python_lexer.py deleted file mode 100644 index 565ee674..00000000 --- a/tests/support/python_lexer.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -# pygments.lexers.python (as CustomLexer) for test_cmdline.py - -from pygments.lexers import PythonLexer - - -class CustomLexer(PythonLexer): - name = 'PythonLexerWrapper' - - -class LexerWrapper(CustomLexer): - name = 'PythonLexerWrapperWrapper' diff --git a/tests/support/tags b/tests/support/tags deleted file mode 100644 index 193779f6..00000000 --- a/tests/support/tags +++ /dev/null @@ -1,36 +0,0 @@ -!_TAG_FILE_FORMAT 2 /extended format; --format=1 will not append ;" to lines/ -!_TAG_FILE_SORTED 1 /0=unsorted, 1=sorted, 2=foldcase/ -!_TAG_PROGRAM_AUTHOR Darren Hiebert /dhiebert@users.sourceforge.net/ -!_TAG_PROGRAM_NAME Exuberant Ctags // -!_TAG_PROGRAM_URL http://ctags.sourceforge.net /official site/ -!_TAG_PROGRAM_VERSION 5.8 // -HtmlFormatter test_html_formatter.py 19;" i -HtmlFormatterTest test_html_formatter.py 34;" c -NullFormatter test_html_formatter.py 19;" i -PythonLexer test_html_formatter.py 18;" i -StringIO test_html_formatter.py 13;" i -dirname test_html_formatter.py 16;" i -escape_html test_html_formatter.py 20;" i -fp test_html_formatter.py 27;" v -inspect test_html_formatter.py 15;" i -isfile test_html_formatter.py 16;" i -join test_html_formatter.py 16;" i -os test_html_formatter.py 10;" i -re test_html_formatter.py 11;" i -subprocess test_html_formatter.py 125;" i -support test_html_formatter.py 23;" i -tempfile test_html_formatter.py 14;" i -test_all_options test_html_formatter.py 72;" m class:HtmlFormatterTest -test_correct_output test_html_formatter.py 35;" m class:HtmlFormatterTest -test_ctags test_html_formatter.py 165;" m class:HtmlFormatterTest -test_external_css test_html_formatter.py 48;" m class:HtmlFormatterTest -test_get_style_defs test_html_formatter.py 141;" m class:HtmlFormatterTest -test_lineanchors test_html_formatter.py 98;" m class:HtmlFormatterTest -test_lineanchors_with_startnum test_html_formatter.py 106;" m class:HtmlFormatterTest -test_linenos test_html_formatter.py 82;" m class:HtmlFormatterTest -test_linenos_with_startnum test_html_formatter.py 90;" m class:HtmlFormatterTest -test_unicode_options test_html_formatter.py 155;" m class:HtmlFormatterTest -test_valid_output test_html_formatter.py 114;" m class:HtmlFormatterTest -tokensource test_html_formatter.py 29;" v -uni_open test_html_formatter.py 21;" i -unittest test_html_formatter.py 12;" i diff --git a/tests/test_asm.py b/tests/test_asm.py deleted file mode 100644 index 30a008a1..00000000 --- a/tests/test_asm.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Basic ColdfusionHtmlLexer Test - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
- :license: BSD, see LICENSE for details. -""" - -import unittest -import os - -from pygments.token import Token -from pygments.lexers import NasmLexer - - -class NasmLexerTest(unittest.TestCase): - - def setUp(self): - self.lexer = NasmLexer() - - def testCPUID(self): - # CPU is a valid directive, and we don't want to parse this as - # cpu id, but as a single token. See bug #1517 - fragment = 'cpuid' - expected = [ - (Token.Name.Function, u'cpuid'), - (Token.Text, u'\n'), - ] - self.assertEqual(expected, list(self.lexer.get_tokens(fragment))) diff --git a/tests/test_basic.py b/tests/test_basic.py deleted file mode 100644 index 03d10cd2..00000000 --- a/tests/test_basic.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Pygments Basic lexers tests - ~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" -import unittest - -from pygments.lexers.basic import VBScriptLexer -from pygments.token import Error, Name, Number, Punctuation, String, Whitespace - - -class VBScriptLexerTest(unittest.TestCase): - - def setUp(self): - self.lexer = VBScriptLexer() - - def _assert_are_tokens_of_type(self, examples, expected_token_type): - for test_number, example in enumerate(examples.split(), 1): - token_count = 0 - for token_type, token_value in self.lexer.get_tokens(example): - if token_type != Whitespace: - token_count += 1 - self.assertEqual( - token_type, expected_token_type, - 'token_type #%d for %s is be %s but must be %s' % - (test_number, token_value, token_type, expected_token_type)) - self.assertEqual( - token_count, 1, - '%s must yield exactly 1 token instead of %d' % - (example, token_count)) - - def _assert_tokens_match(self, text, expected_tokens_without_trailing_newline): - actual_tokens = tuple(self.lexer.get_tokens(text)) - if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')): - actual_tokens = tuple(actual_tokens[:-1]) - self.assertEqual( - expected_tokens_without_trailing_newline, actual_tokens, - 'text must yield expected tokens: %s' % text) - - def test_can_lex_float(self): - self._assert_are_tokens_of_type( - '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float) - self._assert_tokens_match( - '1e2.1e2', - ((Number.Float, '1e2'), (Number.Float, '.1e2')) - ) - - def test_can_reject_almost_float(self): - self._assert_tokens_match( - '.e1', - ((Punctuation, '.'), (Name, 'e1'))) - - def test_can_lex_integer(self): - self._assert_are_tokens_of_type( - '1 23 456', Number.Integer) - - def test_can_lex_names(self): - self._assert_are_tokens_of_type( - u'thingy thingy123 _thingy _123', Name) - - def test_can_recover_after_unterminated_string(self): - self._assert_tokens_match( - '"x\nx', - ((String.Double, '"'), (String.Double, 'x'), (Error, '\n'), (Name, 'x')) - ) - - def test_can_recover_from_invalid_character(self): - self._assert_tokens_match( - 'a;bc\nd', - ((Name, 'a'), (Error, ';bc\n'), (Name, 'd')) - ) diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py deleted file mode 100644 index b1b69267..00000000 --- a/tests/test_basic_api.py +++ /dev/null @@ -1,334 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Pygments basic API tests - ~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -from __future__ import print_function - -import random -import unittest - -from pygments import lexers, formatters, lex, format -from pygments.token import _TokenType, Text -from pygments.lexer import RegexLexer -from pygments.formatters.img import FontNotFound -from pygments.util import text_type, StringIO, BytesIO, xrange, ClassNotFound - -import support - -TESTFILE, TESTDIR = support.location(__file__) - -test_content = [chr(i) for i in xrange(33, 128)] * 5 -random.shuffle(test_content) -test_content = ''.join(test_content) + '\n' - - -def test_lexer_instantiate_all(): - # instantiate every lexer, to see if the token type defs are correct - def verify(name): - getattr(lexers, name) - for x in lexers.LEXERS: - yield verify, x - - -def test_lexer_classes(): - # test that every lexer class has the correct public API - def verify(cls): - assert type(cls.name) is str - for attr in 'aliases', 'filenames', 'alias_filenames', 'mimetypes': - assert hasattr(cls, attr) - assert type(getattr(cls, attr)) is list, \ - "%s: %s attribute wrong" % (cls, attr) - result = cls.analyse_text("abc") - assert isinstance(result, float) and 0.0 <= result <= 1.0 - result = cls.analyse_text(".abc") - assert isinstance(result, float) and 0.0 <= result <= 1.0 - - assert all(al.lower() == al for al in cls.aliases) - - inst = cls(opt1="val1", opt2="val2") - if issubclass(cls, RegexLexer): - if not hasattr(cls, '_tokens'): - # if there's no "_tokens", the lexer has to be one with - # multiple tokendef variants - assert cls.token_variants - for variant in cls.tokens: - assert 'root' in cls.tokens[variant] - else: - assert 'root' in cls._tokens, \ - '%s has no root state' % cls - - if cls.name in ['XQuery', 'Opa']: # XXX temporary - return - - try: - tokens = list(inst.get_tokens(test_content)) - except KeyboardInterrupt: - raise KeyboardInterrupt( - 'interrupted %s.get_tokens(): test_content=%r' % - (cls.__name__, test_content)) - txt = "" - for token in tokens: - assert isinstance(token, tuple) - assert isinstance(token[0], _TokenType) - assert isinstance(token[1], text_type) - txt += token[1] - assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \ - (cls.name, test_content, txt) - - for lexer in lexers._iter_lexerclasses(plugins=False): - yield verify, lexer - - -def test_lexer_options(): - # test that the basic options work - def ensure(tokens, output): - concatenated = ''.join(token[1] for token in tokens) - assert concatenated == output, \ - '%s: %r != %r' % (lexer, concatenated, output) - - def verify(cls): - inst = cls(stripnl=False) - ensure(inst.get_tokens('a\nb'), 'a\nb\n') - ensure(inst.get_tokens('\n\n\n'), '\n\n\n') - inst = cls(stripall=True) - ensure(inst.get_tokens(' \n b\n\n\n'), 'b\n') - # some lexers require full lines in input - if ('ConsoleLexer' not in cls.__name__ and - 'SessionLexer' not in cls.__name__ and - not cls.__name__.startswith('Literate') and - cls.__name__ not in ('ErlangShellLexer', 'RobotFrameworkLexer')): - inst = cls(ensurenl=False) - ensure(inst.get_tokens('a\nb'), 'a\nb') - inst = cls(ensurenl=False, stripall=True) - ensure(inst.get_tokens('a\nb\n\n'), 'a\nb') - - for lexer in lexers._iter_lexerclasses(plugins=False): - if lexer.__name__ == 'RawTokenLexer': - # this one is special - continue - yield verify, lexer - - -def test_get_lexers(): - # test that the lexers functions work - def verify(func, args): - x = func(opt='val', *args) - assert isinstance(x, lexers.PythonLexer) - assert x.options["opt"] == "val" - - for func, args in 
[(lexers.get_lexer_by_name, ("python",)), - (lexers.get_lexer_for_filename, ("test.py",)), - (lexers.get_lexer_for_mimetype, ("text/x-python",)), - (lexers.guess_lexer, ("#!/usr/bin/python -O\nprint",)), - (lexers.guess_lexer_for_filename, ("a.py", "<%= @foo %>")) - ]: - yield verify, func, args - - for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.items(): - assert cls == lexers.find_lexer_class(lname).__name__ - - for alias in aliases: - assert cls == lexers.get_lexer_by_name(alias).__class__.__name__ - - for mimetype in mimetypes: - assert cls == lexers.get_lexer_for_mimetype(mimetype).__class__.__name__ - - try: - lexers.get_lexer_by_name(None) - except ClassNotFound: - pass - else: - raise Exception - - -def test_formatter_public_api(): - # test that every formatter class has the correct public API - ts = list(lexers.PythonLexer().get_tokens("def f(): pass")) - string_out = StringIO() - bytes_out = BytesIO() - - def verify(formatter): - info = formatters.FORMATTERS[formatter.__name__] - assert len(info) == 5 - assert info[1], "missing formatter name" - assert info[2], "missing formatter aliases" - assert info[4], "missing formatter docstring" - - try: - inst = formatter(opt1="val1") - except (ImportError, FontNotFound) as e: - raise support.SkipTest(e) - - try: - inst.get_style_defs() - except NotImplementedError: - # may be raised by formatters for which it doesn't make sense - pass - - if formatter.unicodeoutput: - inst.format(ts, string_out) - else: - inst.format(ts, bytes_out) - - for name in formatters.FORMATTERS: - formatter = getattr(formatters, name) - yield verify, formatter - - -def test_formatter_encodings(): - from pygments.formatters import HtmlFormatter - - # unicode output - fmt = HtmlFormatter() - tokens = [(Text, u"ä")] - out = format(tokens, fmt) - assert type(out) is text_type - assert u"ä" in out - - # encoding option - fmt = HtmlFormatter(encoding="latin1") - tokens = [(Text, u"ä")] - assert u"ä".encode("latin1") in format(tokens, fmt) - - # encoding and outencoding option - fmt = HtmlFormatter(encoding="latin1", outencoding="utf8") - tokens = [(Text, u"ä")] - assert u"ä".encode("utf8") in format(tokens, fmt) - - -def test_formatter_unicode_handling(): - # test that the formatter supports encoding and Unicode - tokens = list(lexers.PythonLexer(encoding='utf-8'). - get_tokens("def f(): 'ä'")) - - def verify(formatter): - try: - inst = formatter(encoding=None) - except (ImportError, FontNotFound) as e: - # some dependency or font not installed - raise support.SkipTest(e) - - if formatter.name != 'Raw tokens': - out = format(tokens, inst) - if formatter.unicodeoutput: - assert type(out) is text_type, '%s: %r' % (formatter, out) - - inst = formatter(encoding='utf-8') - out = format(tokens, inst) - assert type(out) is bytes, '%s: %r' % (formatter, out) - # Cannot test for encoding, since formatters may have to escape - # non-ASCII characters. 
- else: - inst = formatter() - out = format(tokens, inst) - assert type(out) is bytes, '%s: %r' % (formatter, out) - - for formatter, info in formatters.FORMATTERS.items(): - # this tests the automatic importing as well - fmter = getattr(formatters, formatter) - yield verify, fmter - - -def test_get_formatters(): - # test that the formatters functions work - x = formatters.get_formatter_by_name("html", opt="val") - assert isinstance(x, formatters.HtmlFormatter) - assert x.options["opt"] == "val" - - x = formatters.get_formatter_for_filename("a.html", opt="val") - assert isinstance(x, formatters.HtmlFormatter) - assert x.options["opt"] == "val" - - -def test_styles(): - # minimal style test - from pygments.formatters import HtmlFormatter - HtmlFormatter(style="pastie") - - -def test_bare_class_handler(): - from pygments.formatters import HtmlFormatter - from pygments.lexers import PythonLexer - try: - lex('test\n', PythonLexer) - except TypeError as e: - assert 'lex() argument must be a lexer instance' in str(e) - else: - assert False, 'nothing raised' - try: - format([], HtmlFormatter) - except TypeError as e: - assert 'format() argument must be a formatter instance' in str(e) - else: - assert False, 'nothing raised' - - -class FiltersTest(unittest.TestCase): - - def test_basic(self): - filters_args = [ - ('whitespace', {'spaces': True, 'tabs': True, 'newlines': True}), - ('whitespace', {'wstokentype': False, 'spaces': True}), - ('highlight', {'names': ['isinstance', 'lexers', 'x']}), - ('codetagify', {'codetags': 'API'}), - ('keywordcase', {'case': 'capitalize'}), - ('raiseonerror', {}), - ('gobble', {'n': 4}), - ('tokenmerge', {}), - ] - for x, args in filters_args: - lx = lexers.PythonLexer() - lx.add_filter(x, **args) - with open(TESTFILE, 'rb') as fp: - text = fp.read().decode('utf-8') - tokens = list(lx.get_tokens(text)) - self.assertTrue(all(isinstance(t[1], text_type) - for t in tokens), - '%s filter did not return Unicode' % x) - roundtext = ''.join([t[1] for t in tokens]) - if x not in ('whitespace', 'keywordcase', 'gobble'): - # these filters change the text - self.assertEqual(roundtext, text, - "lexer roundtrip with %s filter failed" % x) - - def test_raiseonerror(self): - lx = lexers.PythonLexer() - lx.add_filter('raiseonerror', excclass=RuntimeError) - self.assertRaises(RuntimeError, list, lx.get_tokens('$')) - - def test_whitespace(self): - lx = lexers.PythonLexer() - lx.add_filter('whitespace', spaces='%') - with open(TESTFILE, 'rb') as fp: - text = fp.read().decode('utf-8') - lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))]) - self.assertFalse(' ' in lxtext) - - def test_keywordcase(self): - lx = lexers.PythonLexer() - lx.add_filter('keywordcase', case='capitalize') - with open(TESTFILE, 'rb') as fp: - text = fp.read().decode('utf-8') - lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))]) - self.assertTrue('Def' in lxtext and 'Class' in lxtext) - - def test_codetag(self): - lx = lexers.PythonLexer() - lx.add_filter('codetagify') - text = u'# BUG: text' - tokens = list(lx.get_tokens(text)) - self.assertEqual('# ', tokens[0][1]) - self.assertEqual('BUG', tokens[1][1]) - - def test_codetag_boundary(self): - # ticket #368 - lx = lexers.PythonLexer() - lx.add_filter('codetagify') - text = u'# DEBUG: text' - tokens = list(lx.get_tokens(text)) - self.assertEqual('# DEBUG: text', tokens[0][1]) diff --git a/tests/test_bibtex.py b/tests/test_bibtex.py deleted file mode 100644 index 2f1c395a..00000000 --- a/tests/test_bibtex.py +++ /dev/null @@ -1,236 +0,0 @@ -# -*- 
coding: utf-8 -*- -""" - BibTeX Test - ~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import textwrap -import unittest - -from pygments.lexers import BibTeXLexer, BSTLexer -from pygments.token import Token - - -class BibTeXTest(unittest.TestCase): - def setUp(self): - self.lexer = BibTeXLexer() - - def testPreamble(self): - data = u'@PREAMBLE{"% some LaTeX code here"}' - tokens = [ - (Token.Name.Class, u'@PREAMBLE'), - (Token.Punctuation, u'{'), - (Token.String, u'"'), - (Token.String, u'% some LaTeX code here'), - (Token.String, u'"'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(data)), tokens) - - def testString(self): - data = u'@STRING(SCI = "Science")' - tokens = [ - (Token.Name.Class, u'@STRING'), - (Token.Punctuation, u'('), - (Token.Name.Attribute, u'SCI'), - (Token.Text, u' '), - (Token.Punctuation, u'='), - (Token.Text, u' '), - (Token.String, u'"'), - (Token.String, u'Science'), - (Token.String, u'"'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(data)), tokens) - - def testEntry(self): - data = u""" - This is a comment. - - @ARTICLE{ruckenstein-diffusion, - author = "Liu, Hongquin" # and # "Ruckenstein, Eli", - year = 1997, - month = JAN, - pages = "888-895" - } - """ - - tokens = [ - (Token.Comment, u'This is a comment.'), - (Token.Text, u'\n\n'), - (Token.Name.Class, u'@ARTICLE'), - (Token.Punctuation, u'{'), - (Token.Name.Label, u'ruckenstein-diffusion'), - (Token.Punctuation, u','), - (Token.Text, u'\n '), - (Token.Name.Attribute, u'author'), - (Token.Text, u' '), - (Token.Punctuation, u'='), - (Token.Text, u' '), - (Token.String, u'"'), - (Token.String, u'Liu, Hongquin'), - (Token.String, u'"'), - (Token.Text, u' '), - (Token.Punctuation, u'#'), - (Token.Text, u' '), - (Token.Name.Variable, u'and'), - (Token.Text, u' '), - (Token.Punctuation, u'#'), - (Token.Text, u' '), - (Token.String, u'"'), - (Token.String, u'Ruckenstein, Eli'), - (Token.String, u'"'), - (Token.Punctuation, u','), - (Token.Text, u'\n '), - (Token.Name.Attribute, u'year'), - (Token.Text, u' '), - (Token.Punctuation, u'='), - (Token.Text, u' '), - (Token.Number, u'1997'), - (Token.Punctuation, u','), - (Token.Text, u'\n '), - (Token.Name.Attribute, u'month'), - (Token.Text, u' '), - (Token.Punctuation, u'='), - (Token.Text, u' '), - (Token.Name.Variable, u'JAN'), - (Token.Punctuation, u','), - (Token.Text, u'\n '), - (Token.Name.Attribute, u'pages'), - (Token.Text, u' '), - (Token.Punctuation, u'='), - (Token.Text, u' '), - (Token.String, u'"'), - (Token.String, u'888-895'), - (Token.String, u'"'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens) - - def testComment(self): - data = '@COMMENT{test}' - tokens = [ - (Token.Comment, u'@COMMENT'), - (Token.Comment, u'{test}'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(data)), tokens) - - def testMissingBody(self): - data = '@ARTICLE xxx' - tokens = [ - (Token.Name.Class, u'@ARTICLE'), - (Token.Text, u' '), - (Token.Error, u'x'), - (Token.Error, u'x'), - (Token.Error, u'x'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(data)), tokens) - - def testMismatchedBrace(self): - data = '@PREAMBLE(""}' - tokens = [ - (Token.Name.Class, u'@PREAMBLE'), - (Token.Punctuation, u'('), - (Token.String, u'"'), - 
(Token.String, u'"'), - (Token.Error, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(data)), tokens) - - -class BSTTest(unittest.TestCase): - def setUp(self): - self.lexer = BSTLexer() - - def testBasicBST(self): - data = """ - % BibTeX standard bibliography style `plain' - - INTEGERS { output.state before.all } - - FUNCTION {sort.format.title} - { 't := - "A " #2 - "An " #3 - "The " #4 t chop.word - chop.word - chop.word - sortify - #1 global.max$ substring$ - } - - ITERATE {call.type$} - """ - tokens = [ - (Token.Comment.SingleLine, "% BibTeX standard bibliography style `plain'"), - (Token.Text, u'\n\n'), - (Token.Keyword, u'INTEGERS'), - (Token.Text, u' '), - (Token.Punctuation, u'{'), - (Token.Text, u' '), - (Token.Name.Variable, u'output.state'), - (Token.Text, u' '), - (Token.Name.Variable, u'before.all'), - (Token.Text, u' '), - (Token.Punctuation, u'}'), - (Token.Text, u'\n\n'), - (Token.Keyword, u'FUNCTION'), - (Token.Text, u' '), - (Token.Punctuation, u'{'), - (Token.Name.Variable, u'sort.format.title'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - (Token.Punctuation, u'{'), - (Token.Text, u' '), - (Token.Name.Function, u"'t"), - (Token.Text, u' '), - (Token.Name.Variable, u':='), - (Token.Text, u'\n'), - (Token.Literal.String, u'"A "'), - (Token.Text, u' '), - (Token.Literal.Number, u'#2'), - (Token.Text, u'\n '), - (Token.Literal.String, u'"An "'), - (Token.Text, u' '), - (Token.Literal.Number, u'#3'), - (Token.Text, u'\n '), - (Token.Literal.String, u'"The "'), - (Token.Text, u' '), - (Token.Literal.Number, u'#4'), - (Token.Text, u' '), - (Token.Name.Variable, u't'), - (Token.Text, u' '), - (Token.Name.Variable, u'chop.word'), - (Token.Text, u'\n '), - (Token.Name.Variable, u'chop.word'), - (Token.Text, u'\n'), - (Token.Name.Variable, u'chop.word'), - (Token.Text, u'\n'), - (Token.Name.Variable, u'sortify'), - (Token.Text, u'\n'), - (Token.Literal.Number, u'#1'), - (Token.Text, u' '), - (Token.Name.Builtin, u'global.max$'), - (Token.Text, u' '), - (Token.Name.Builtin, u'substring$'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n\n'), - (Token.Keyword, u'ITERATE'), - (Token.Text, u' '), - (Token.Punctuation, u'{'), - (Token.Name.Builtin, u'call.type$'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens) diff --git a/tests/test_cfm.py b/tests/test_cfm.py deleted file mode 100644 index e7147a6c..00000000 --- a/tests/test_cfm.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Basic ColdfusionHtmlLexer Test - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import unittest -import os - -from pygments.token import Token -from pygments.lexers import ColdfusionHtmlLexer - - -class ColdfusionHtmlLexerTest(unittest.TestCase): - - def setUp(self): - self.lexer = ColdfusionHtmlLexer() - - def testBasicComment(self): - fragment = u'' - expected = [ - (Token.Text, u''), - (Token.Comment.Multiline, u''), - (Token.Text, u'\n'), - ] - self.assertEqual(expected, list(self.lexer.get_tokens(fragment))) - - def testNestedComment(self): - fragment = u' --->' - expected = [ - (Token.Text, u''), - (Token.Comment.Multiline, u''), - (Token.Comment.Multiline, u' '), - (Token.Comment.Multiline, u'--->'), - (Token.Text, u'\n'), - ] - self.assertEqual(expected, list(self.lexer.get_tokens(fragment))) diff --git a/tests/test_clexer.py b/tests/test_clexer.py deleted file mode 100644 index 64b765ef..00000000 --- a/tests/test_clexer.py +++ /dev/null @@ -1,259 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Basic CLexer Test - ~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import unittest -import os -import textwrap - -from pygments.token import Text, Number, Token -from pygments.lexers import CLexer - - -class CLexerTest(unittest.TestCase): - - def setUp(self): - self.lexer = CLexer() - - def testNumbers(self): - code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23' - wanted = [] - for item in zip([Number.Integer, Number.Float, Number.Float, - Number.Float, Number.Oct, Number.Hex, - Number.Float, Number.Float], code.split()): - wanted.append(item) - wanted.append((Text, ' ')) - wanted = wanted[:-1] + [(Text, '\n')] - self.assertEqual(list(self.lexer.get_tokens(code)), wanted) - - def testSwitch(self): - fragment = u'''\ - int main() - { - switch (0) - { - case 0: - default: - ; - } - } - ''' - tokens = [ - (Token.Keyword.Type, u'int'), - (Token.Text, u' '), - (Token.Name.Function, u'main'), - (Token.Punctuation, u'('), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'switch'), - (Token.Text, u' '), - (Token.Punctuation, u'('), - (Token.Literal.Number.Integer, u'0'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'case'), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'0'), - (Token.Operator, u':'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'default'), - (Token.Operator, u':'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) - - def testSwitchSpaceBeforeColon(self): - fragment = u'''\ - int main() - { - switch (0) - { - case 0 : - default : - ; - } - } - ''' - tokens = [ - (Token.Keyword.Type, u'int'), - (Token.Text, u' '), - (Token.Name.Function, u'main'), - (Token.Punctuation, u'('), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'switch'), - (Token.Text, u' '), - (Token.Punctuation, u'('), - (Token.Literal.Number.Integer, u'0'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Text, u' '), - 
(Token.Keyword, u'case'), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'0'), - (Token.Text, u' '), - (Token.Operator, u':'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'default'), - (Token.Text, u' '), - (Token.Operator, u':'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) - - def testLabel(self): - fragment = u'''\ - int main() - { - foo: - goto foo; - } - ''' - tokens = [ - (Token.Keyword.Type, u'int'), - (Token.Text, u' '), - (Token.Name.Function, u'main'), - (Token.Punctuation, u'('), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Name.Label, u'foo'), - (Token.Punctuation, u':'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'goto'), - (Token.Text, u' '), - (Token.Name, u'foo'), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) - - def testLabelSpaceBeforeColon(self): - fragment = u'''\ - int main() - { - foo : - goto foo; - } - ''' - tokens = [ - (Token.Keyword.Type, u'int'), - (Token.Text, u' '), - (Token.Name.Function, u'main'), - (Token.Punctuation, u'('), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Name.Label, u'foo'), - (Token.Text, u' '), - (Token.Punctuation, u':'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'goto'), - (Token.Text, u' '), - (Token.Name, u'foo'), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) - - def testLabelFollowedByStatement(self): - fragment = u'''\ - int main() - { - foo:return 0; - goto foo; - } - ''' - tokens = [ - (Token.Keyword.Type, u'int'), - (Token.Text, u' '), - (Token.Name.Function, u'main'), - (Token.Punctuation, u'('), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Punctuation, u'{'), - (Token.Text, u'\n'), - (Token.Name.Label, u'foo'), - (Token.Punctuation, u':'), - (Token.Keyword, u'return'), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'0'), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'goto'), - (Token.Text, u' '), - (Token.Name, u'foo'), - (Token.Punctuation, u';'), - (Token.Text, u'\n'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment)))) - - def testPreprocFile(self): - fragment = u'#include <foo>\n' - tokens = [ - (Token.Comment.Preproc, u'#'), - (Token.Comment.Preproc, u'include'), - (Token.Text, u' '), - (Token.Comment.PreprocFile, u'<foo>'), - (Token.Comment.Preproc, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testPreprocFile2(self): - fragment = u'#include "foo.h"\n' - tokens = [ - (Token.Comment.Preproc, u'#'), - (Token.Comment.Preproc, u'include'), - (Token.Text, u' '), - (Token.Comment.PreprocFile, u'"foo.h"'), - (Token.Comment.Preproc, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py deleted file mode 100644 
index 169d690d..00000000 --- a/tests/test_cmdline.py +++ /dev/null @@ -1,313 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Command line test - ~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from __future__ import print_function - -import io -import os -import re -import sys -import tempfile -import unittest - -import support -from pygments import cmdline, highlight -from pygments.util import BytesIO, StringIO - - -TESTFILE, TESTDIR = support.location(__file__) -TESTCODE = '''\ -def func(args): - pass -''' - - -def _decode_output(text): - try: - return text.decode('utf-8') - except UnicodeEncodeError: # implicit encode on Python 2 with data loss - return text - - -def run_cmdline(*args, **kwds): - saved_stdin = sys.stdin - saved_stdout = sys.stdout - saved_stderr = sys.stderr - if sys.version_info > (3,): - stdin_buffer = BytesIO() - stdout_buffer = BytesIO() - stderr_buffer = BytesIO() - new_stdin = sys.stdin = io.TextIOWrapper(stdin_buffer, 'utf-8') - new_stdout = sys.stdout = io.TextIOWrapper(stdout_buffer, 'utf-8') - new_stderr = sys.stderr = io.TextIOWrapper(stderr_buffer, 'utf-8') - else: - stdin_buffer = new_stdin = sys.stdin = StringIO() - stdout_buffer = new_stdout = sys.stdout = StringIO() - stderr_buffer = new_stderr = sys.stderr = StringIO() - new_stdin.write(kwds.get('stdin', '')) - new_stdin.seek(0, 0) - try: - ret = cmdline.main(['pygmentize'] + list(args)) - finally: - sys.stdin = saved_stdin - sys.stdout = saved_stdout - sys.stderr = saved_stderr - new_stdout.flush() - new_stderr.flush() - out, err = stdout_buffer.getvalue(), \ - stderr_buffer.getvalue() - return (ret, _decode_output(out), _decode_output(err)) - - -class CmdLineTest(unittest.TestCase): - - def check_success(self, *cmdline, **kwds): - code, out, err = run_cmdline(*cmdline, **kwds) - self.assertEqual(code, 0) - self.assertEqual(err, '') - return out - - def check_failure(self, *cmdline, **kwds): - expected_code = kwds.pop('code', 1) - code, out, err = run_cmdline(*cmdline, **kwds) - self.assertEqual(code, expected_code) - self.assertEqual(out, '') - return err - - def test_normal(self): - # test that cmdline gives the same output as library api - from pygments.lexers import PythonLexer - from pygments.formatters import HtmlFormatter - filename = TESTFILE - with open(filename, 'rb') as fp: - code = fp.read() - - output = highlight(code, PythonLexer(), HtmlFormatter()) - - o = self.check_success('-lpython', '-fhtml', filename) - self.assertEqual(o, output) - - def test_stdin(self): - o = self.check_success('-lpython', '-fhtml', stdin=TESTCODE) - o = re.sub('<[^>]*>', '', o) - # rstrip is necessary since HTML inserts a \n after the last
    - self.assertEqual(o.rstrip(), TESTCODE.rstrip()) - - # guess if no lexer given - o = self.check_success('-fhtml', stdin=TESTCODE) - o = re.sub('<[^>]*>', '', o) - # rstrip is necessary since HTML inserts a \n after the last - self.assertEqual(o.rstrip(), TESTCODE.rstrip()) - - def test_outfile(self): - # test that output file works with and without encoding - fd, name = tempfile.mkstemp() - os.close(fd) - for opts in [['-fhtml', '-o', name, TESTFILE], - ['-flatex', '-o', name, TESTFILE], - ['-fhtml', '-o', name, '-O', 'encoding=utf-8', TESTFILE]]: - try: - self.check_success(*opts) - finally: - os.unlink(name) - - def test_load_from_file(self): - lexer_file = os.path.join(TESTDIR, 'support', 'python_lexer.py') - formatter_file = os.path.join(TESTDIR, 'support', 'html_formatter.py') - - # By default, use CustomLexer - o = self.check_success('-l', lexer_file, '-f', 'html', - '-x', stdin=TESTCODE) - o = re.sub('<[^>]*>', '', o) - # rstrip is necessary since HTML inserts a \n after the last - self.assertEqual(o.rstrip(), TESTCODE.rstrip()) - - # If user specifies a name, use it - o = self.check_success('-f', 'html', '-x', '-l', - lexer_file + ':LexerWrapper', stdin=TESTCODE) - o = re.sub('<[^>]*>', '', o) - # rstrip is necessary since HTML inserts a \n after the last - self.assertEqual(o.rstrip(), TESTCODE.rstrip()) - - # Should also work for formatters - o = self.check_success('-lpython', '-f', - formatter_file + ':HtmlFormatterWrapper', - '-x', stdin=TESTCODE) - o = re.sub('<[^>]*>', '', o) - # rstrip is necessary since HTML inserts a \n after the last - self.assertEqual(o.rstrip(), TESTCODE.rstrip()) - - def test_stream_opt(self): - o = self.check_success('-lpython', '-s', '-fterminal', stdin=TESTCODE) - o = re.sub(r'\x1b\[.*?m', '', o) - self.assertEqual(o.replace('\r\n', '\n'), TESTCODE) - - def test_h_opt(self): - o = self.check_success('-h') - self.assertTrue('Usage:' in o) - - def test_L_opt(self): - o = self.check_success('-L') - self.assertTrue('Lexers' in o and 'Formatters' in o and - 'Filters' in o and 'Styles' in o) - o = self.check_success('-L', 'lexer') - self.assertTrue('Lexers' in o and 'Formatters' not in o) - self.check_success('-L', 'lexers') - - def test_O_opt(self): - filename = TESTFILE - o = self.check_success('-Ofull=1,linenos=true,foo=bar', - '-fhtml', filename) - self.assertTrue('foo, bar=baz=,' in o) - - def test_F_opt(self): - filename = TESTFILE - o = self.check_success('-Fhighlight:tokentype=Name.Blubb,' - 'names=TESTFILE filename', - '-fhtml', filename) - self.assertTrue('(other : self) : Int\n' - '{%for field in %w(first_name middle_name last_name)%}\n' - 'cmp={{field.id}}<=>other.{{field.id}}\n' - 'return cmp if cmp!=0\n' - '{%end%}\n' - '0\n' - 'end\n') - tokens = [ - (Keyword, 'def'), - (Name.Function, '<=>'), - (Punctuation, '('), - (Name, 'other'), - (Text, ' '), - (Punctuation, ':'), - (Text, ' '), - (Keyword.Pseudo, 'self'), - (Punctuation, ')'), - (Text, ' '), - (Punctuation, ':'), - (Text, ' '), - (Name.Builtin, 'Int'), - (Text, '\n'), - (String.Interpol, '{%'), - (Keyword, 'for'), - (Text, ' '), - (Name, 'field'), - (Text, ' '), - (Keyword, 'in'), - (Text, ' '), - (String.Other, '%w('), - (String.Other, 'first_name middle_name last_name'), - (String.Other, ')'), - (String.Interpol, '%}'), - (Text, '\n'), - (Name, 'cmp'), - (Operator, '='), - (String.Interpol, '{{'), - (Name, 'field'), - (Operator, '.'), - (Name, 'id'), - (String.Interpol, '}}'), - (Operator, '<=>'), - (Name, 'other'), - (Operator, '.'), - (String.Interpol, '{{'), - (Name, 
'field'), - (Operator, '.'), - (Name, 'id'), - (String.Interpol, '}}'), - (Text, '\n'), - (Keyword, 'return'), - (Text, ' '), - (Name, 'cmp'), - (Text, ' '), - (Keyword, 'if'), - (Text, ' '), - (Name, 'cmp'), - (Operator, '!='), - (Number.Integer, '0'), - (Text, '\n'), - (String.Interpol, '{%'), - (Keyword, 'end'), - (String.Interpol, '%}'), - (Text, '\n'), - (Number.Integer, '0'), - (Text, '\n'), - (Keyword, 'end'), - (Text, '\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testLib(self): - fragment = ( - '@[Link("some")]\nlib LibSome\n' - '@[CallConvention("X86_StdCall")]\nfun foo="some.foo"(thing : Void*) : LibC::Int\n' - 'end\n') - tokens = [ - (Operator, '@['), - (Name.Decorator, 'Link'), - (Punctuation, '('), - (String.Double, '"'), - (String.Double, 'some'), - (String.Double, '"'), - (Punctuation, ')'), - (Operator, ']'), - (Text, '\n'), - (Keyword, 'lib'), - (Text, ' '), - (Name.Namespace, 'LibSome'), - (Text, '\n'), - (Operator, '@['), - (Name.Decorator, 'CallConvention'), - (Punctuation, '('), - (String.Double, '"'), - (String.Double, 'X86_StdCall'), - (String.Double, '"'), - (Punctuation, ')'), - (Operator, ']'), - (Text, '\n'), - (Keyword, 'fun'), - (Text, ' '), - (Name.Function, 'foo'), - (Operator, '='), - (String.Double, '"'), - (String.Double, 'some.foo'), - (String.Double, '"'), - (Punctuation, '('), - (Name, 'thing'), - (Text, ' '), - (Punctuation, ':'), - (Text, ' '), - (Name.Builtin, 'Void'), - (Operator, '*'), - (Punctuation, ')'), - (Text, ' '), - (Punctuation, ':'), - (Text, ' '), - (Name, 'LibC'), - (Operator, '::'), - (Name.Builtin, 'Int'), - (Text, '\n'), - (Keyword, 'end'), - (Text, '\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testEscapedBracestring(self): - fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n' - tokens = [ - (Name, 'str'), - (Operator, '.'), - (Name, 'gsub'), - (Punctuation, '('), - (String.Regex, '%r{'), - (String.Regex, '\\\\'), - (String.Regex, '\\\\'), - (String.Regex, '}'), - (Punctuation, ','), - (Text, ' '), - (String.Double, '"'), - (String.Double, '/'), - (String.Double, '"'), - (Punctuation, ')'), - (Text, '\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) diff --git a/tests/test_csound.py b/tests/test_csound.py deleted file mode 100644 index 8a253916..00000000 --- a/tests/test_csound.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Csound lexer tests - ~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import unittest -from textwrap import dedent - -from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \ - String, Text -from pygments.lexers import CsoundOrchestraLexer - - -class CsoundOrchestraTest(unittest.TestCase): - - def setUp(self): - self.lexer = CsoundOrchestraLexer() - self.maxDiff = None - - def testComments(self): - fragment = dedent('''\ - /* - * comment - */ - ; comment - // comment - ''') - tokens = [ - (Comment.Multiline, u'/*\n * comment\n */'), - (Text, u'\n'), - (Comment.Single, u'; comment'), - (Text, u'\n'), - (Comment.Single, u'// comment'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testInstrumentBlocks(self): - fragment = dedent('''\ - instr/**/1,/**/N_a_M_e_,/**/+Name/**/// - iDuration = p3 - outc:a(aSignal) - endin - ''') - tokens = [ - (Keyword.Declaration, u'instr'), - (Comment.Multiline, u'/**/'), - (Name.Function, u'1'), - (Punctuation, u','), - (Comment.Multiline, u'/**/'), - (Name.Function, u'N_a_M_e_'), - (Punctuation, u','), - (Comment.Multiline, u'/**/'), - (Punctuation, u'+'), - (Name.Function, u'Name'), - (Comment.Multiline, u'/**/'), - (Comment.Single, u'//'), - (Text, u'\n'), - (Text, u' '), - (Keyword.Type, u'i'), - (Name, u'Duration'), - (Text, u' '), - (Operator, u'='), - (Text, u' '), - (Name.Variable.Instance, u'p3'), - (Text, u'\n'), - (Text, u' '), - (Name.Builtin, u'outc'), - (Punctuation, u':'), - (Keyword.Type, u'a'), - (Punctuation, u'('), - (Keyword.Type, u'a'), - (Name, u'Signal'), - (Punctuation, u')'), - (Text, u'\n'), - (Keyword.Declaration, u'endin'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testUserDefinedOpcodes(self): - fragment = dedent('''\ - opcode/**/aUDO,/**/i[],/**/aik// - aUDO - endop - ''') - tokens = [ - (Keyword.Declaration, u'opcode'), - (Comment.Multiline, u'/**/'), - (Name.Function, u'aUDO'), - (Punctuation, u','), - (Comment.Multiline, u'/**/'), - (Keyword.Type, u'i[]'), - (Punctuation, u','), - (Comment.Multiline, u'/**/'), - (Keyword.Type, u'aik'), - (Comment.Single, u'//'), - (Text, u'\n'), - (Text, u' '), - (Name.Function, u'aUDO'), - (Text, u'\n'), - (Keyword.Declaration, u'endop'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testNumbers(self): - fragment = '123 0123456789' - tokens = [ - (Number.Integer, u'123'), - (Text, u' '), - (Number.Integer, u'0123456789'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - fragment = '0xabcdef0123456789 0XABCDEF' - tokens = [ - (Keyword.Type, u'0x'), - (Number.Hex, u'abcdef0123456789'), - (Text, u' '), - (Keyword.Type, u'0X'), - (Number.Hex, u'ABCDEF'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - fragments = ['1e2', '3e+4', '5e-6', '7E8', '9E+0', '1E-2', '3.', '4.56', '.789'] - for fragment in fragments: - tokens = [ - (Number.Float, fragment), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testQuotedStrings(self): - fragment = '"characters$MACRO."' - tokens = [ - (String, u'"'), - (String, u'characters'), - (Comment.Preproc, u'$MACRO.'), - (String, u'"'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testBracedStrings(self): - fragment = dedent('''\ - {{ - characters$MACRO. 
- }} - ''') - tokens = [ - (String, u'{{'), - (String, u'\ncharacters$MACRO.\n'), - (String, u'}}'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testEscapeSequences(self): - for character in ['\\', 'a', 'b', 'n', 'r', 't', '"', '012', '345', '67']: - escapedCharacter = '\\' + character - fragment = '"' + escapedCharacter + '"' - tokens = [ - (String, u'"'), - (String.Escape, escapedCharacter), - (String, u'"'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - fragment = '{{' + escapedCharacter + '}}' - tokens = [ - (String, u'{{'), - (String.Escape, escapedCharacter), - (String, u'}}'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testOperators(self): - fragments = ['+', '-', '~', u'¬', '!', '*', '/', '^', '%', '<<', '>>', '<', '>', - '<=', '>=', '==', '!=', '&', '#', '|', '&&', '||', '?', ':', '+=', - '-=', '*=', '/='] - for fragment in fragments: - tokens = [ - (Operator, fragment), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testGlobalValueIdentifiers(self): - for fragment in ['0dbfs', 'A4', 'kr', 'ksmps', 'nchnls', 'nchnls_i', 'sr']: - tokens = [ - (Name.Variable.Global, fragment), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testKeywords(self): - fragments = ['do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', - 'kthen', 'od', 'then', 'until', 'while'] - for fragment in fragments: - tokens = [ - (Keyword, fragment), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - for fragment in ['return', 'rireturn']: - tokens = [ - (Keyword.Pseudo, fragment), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testLabels(self): - fragment = dedent('''\ - aLabel: - label2: - ''') - tokens = [ - (Name.Label, u'aLabel'), - (Punctuation, u':'), - (Text, u'\n'), - (Text, u' '), - (Name.Label, u'label2'), - (Punctuation, u':'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testPrintksAndPrintsEscapeSequences(self): - escapedCharacters = ['%!', '%%', '%n', '%N', '%r', '%R', '%t', '%T', '\\\\a', - '\\\\A', '\\\\b', '\\\\B', '\\\\n', '\\\\N', '\\\\r', - '\\\\R', '\\\\t', '\\\\T'] - for opcode in ['printks', 'prints']: - for escapedCharacter in escapedCharacters: - fragment = opcode + ' "' + escapedCharacter + '"' - tokens = [ - (Name.Builtin, opcode), - (Text, u' '), - (String, u'"'), - (String.Escape, escapedCharacter), - (String, u'"'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testGotoStatements(self): - for keyword in ['goto', 'igoto', 'kgoto']: - fragment = keyword + ' aLabel' - tokens = [ - (Keyword, keyword), - (Text, u' '), - (Name.Label, u'aLabel'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - for opcode in ['reinit', 'rigoto', 'tigoto']: - fragment = opcode + ' aLabel' - tokens = [ - (Keyword.Pseudo, opcode), - (Text, u' '), - (Name.Label, u'aLabel'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - for opcode in ['cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto', 'cnkgoto']: - fragment = opcode + ' 1==0, aLabel' - tokens = [ - (Keyword.Pseudo, opcode), - (Text, u' '), - (Number.Integer, u'1'), - (Operator, u'=='), - (Number.Integer, u'0'), - (Punctuation, u','), - (Text, u' '), - (Name.Label, 
u'aLabel'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - fragment = 'timout 0, 0, aLabel' - tokens = [ - (Keyword.Pseudo, 'timout'), - (Text, u' '), - (Number.Integer, u'0'), - (Punctuation, u','), - (Text, u' '), - (Number.Integer, u'0'), - (Punctuation, u','), - (Text, u' '), - (Name.Label, u'aLabel'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - for opcode in ['loop_ge', 'loop_gt', 'loop_le', 'loop_lt']: - fragment = opcode + ' 0, 0, 0, aLabel' - tokens = [ - (Keyword.Pseudo, opcode), - (Text, u' '), - (Number.Integer, u'0'), - (Punctuation, u','), - (Text, u' '), - (Number.Integer, u'0'), - (Punctuation, u','), - (Text, u' '), - (Number.Integer, u'0'), - (Punctuation, u','), - (Text, u' '), - (Name.Label, u'aLabel'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testIncludeDirectives(self): - for character in ['"', '|']: - fragment = '#include/**/' + character + 'file.udo' + character - tokens = [ - (Comment.Preproc, u'#include'), - (Comment.Multiline, u'/**/'), - (String, character + u'file.udo' + character), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testObjectLikeMacroDefinitions(self): - fragment = dedent('''\ - # \tdefine MACRO#macro_body# - #define/**/ - MACRO/**/ - #\\#macro - body\\## - ''') - tokens = [ - (Comment.Preproc, u'# \tdefine'), - (Text, u' '), - (Comment.Preproc, u'MACRO'), - (Punctuation, u'#'), - (Comment.Preproc, u'macro_body'), - (Punctuation, u'#'), - (Text, u'\n'), - (Comment.Preproc, u'#define'), - (Comment.Multiline, u'/**/'), - (Text, u'\n'), - (Comment.Preproc, u'MACRO'), - (Comment.Multiline, u'/**/'), - (Text, u'\n'), - (Punctuation, u'#'), - (Comment.Preproc, u'\\#'), - (Comment.Preproc, u'macro\nbody'), - (Comment.Preproc, u'\\#'), - (Punctuation, u'#'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testFunctionLikeMacroDefinitions(self): - fragment = dedent('''\ - #define MACRO(ARG1#ARG2) #macro_body# - #define/**/ - MACRO(ARG1'ARG2' ARG3)/**/ - #\\#macro - body\\## - ''') - tokens = [ - (Comment.Preproc, u'#define'), - (Text, u' '), - (Comment.Preproc, u'MACRO'), - (Punctuation, u'('), - (Comment.Preproc, u'ARG1'), - (Punctuation, u'#'), - (Comment.Preproc, u'ARG2'), - (Punctuation, u')'), - (Text, u' '), - (Punctuation, u'#'), - (Comment.Preproc, u'macro_body'), - (Punctuation, u'#'), - (Text, u'\n'), - (Comment.Preproc, u'#define'), - (Comment.Multiline, u'/**/'), - (Text, u'\n'), - (Comment.Preproc, u'MACRO'), - (Punctuation, u'('), - (Comment.Preproc, u'ARG1'), - (Punctuation, u"'"), - (Comment.Preproc, u'ARG2'), - (Punctuation, u"'"), - (Text, u' '), - (Comment.Preproc, u'ARG3'), - (Punctuation, u')'), - (Comment.Multiline, u'/**/'), - (Text, u'\n'), - (Punctuation, u'#'), - (Comment.Preproc, u'\\#'), - (Comment.Preproc, u'macro\nbody'), - (Comment.Preproc, u'\\#'), - (Punctuation, u'#'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testMacroPreprocessorDirectives(self): - for directive in ['#ifdef', '#ifndef', '#undef']: - fragment = directive + ' MACRO' - tokens = [ - (Comment.Preproc, directive), - (Text, u' '), - (Comment.Preproc, u'MACRO'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testOtherPreprocessorDirectives(self): - fragment = dedent('''\ - #else - #end - #endif - ### - @ \t12345 - @@ \t67890 - ''') - 
tokens = [ - (Comment.Preproc, u'#else'), - (Text, u'\n'), - (Comment.Preproc, u'#end'), - (Text, u'\n'), - (Comment.Preproc, u'#endif'), - (Text, u'\n'), - (Comment.Preproc, u'###'), - (Text, u'\n'), - (Comment.Preproc, u'@ \t12345'), - (Text, u'\n'), - (Comment.Preproc, u'@@ \t67890'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testFunctionLikeMacros(self): - fragment = "$MACRO.(((x#y\\)))' \"(#'x)\\)x\\))\"# {{x\\))x)\\)(#'}});" - tokens = [ - (Comment.Preproc, u'$MACRO.'), - (Punctuation, u'('), - (Comment.Preproc, u'('), - (Comment.Preproc, u'('), - (Comment.Preproc, u'x#y\\)'), - (Comment.Preproc, u')'), - (Comment.Preproc, u')'), - (Punctuation, u"'"), - (Comment.Preproc, u' '), - (String, u'"'), - (Error, u'('), - (Error, u'#'), - (Error, u"'"), - (String, u'x'), - (Error, u')'), - (Comment.Preproc, u'\\)'), - (String, u'x'), - (Comment.Preproc, u'\\)'), - (Error, u')'), - (String, u'"'), - (Punctuation, u'#'), - (Comment.Preproc, u' '), - (String, u'{{'), - (String, u'x'), - (Comment.Preproc, u'\\)'), - (Error, u')'), - (String, u'x'), - (Error, u')'), - (Comment.Preproc, u'\\)'), - (Error, u'('), - (Error, u'#'), - (Error, u"'"), - (String, u'}}'), - (Punctuation, u')'), - (Comment.Single, u';'), - (Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testName(self): - fragment = 'kG:V' - tokens = [ - (Keyword.Type, 'k'), - (Name, 'G'), - (Punctuation, ':'), - (Name, 'V'), - (Text, '\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) diff --git a/tests/test_data.py b/tests/test_data.py deleted file mode 100644 index 20e74be0..00000000 --- a/tests/test_data.py +++ /dev/null @@ -1,117 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Data Tests - ~~~~~~~~~~ - - :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import unittest - -from pygments.lexers import JsonLexer, JsonBareObjectLexer, YamlLexer -from pygments.token import Token - - -class JsonTest(unittest.TestCase): - def setUp(self): - self.lexer = JsonLexer() - - def testBasic(self): - fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n' - tokens = [ - (Token.Punctuation, u'{'), - (Token.Name.Tag, u'"foo"'), - (Token.Punctuation, u':'), - (Token.Text, u' '), - (Token.Literal.String.Double, u'"bar"'), - (Token.Punctuation, u','), - (Token.Text, u' '), - (Token.Name.Tag, u'"foo2"'), - (Token.Punctuation, u':'), - (Token.Text, u' '), - (Token.Punctuation, u'['), - (Token.Literal.Number.Integer, u'1'), - (Token.Punctuation, u','), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u','), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'3'), - (Token.Punctuation, u']'), - (Token.Punctuation, u'}'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - -class JsonBareObjectTest(unittest.TestCase): - def setUp(self): - self.lexer = JsonBareObjectLexer() - - def testBasic(self): - # This is the same as testBasic for JsonLexer above, except the - # enclosing curly braces are removed. 
- fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n' - tokens = [ - (Token.Name.Tag, u'"foo"'), - (Token.Punctuation, u':'), - (Token.Text, u' '), - (Token.Literal.String.Double, u'"bar"'), - (Token.Punctuation, u','), - (Token.Text, u' '), - (Token.Name.Tag, u'"foo2"'), - (Token.Punctuation, u':'), - (Token.Text, u' '), - (Token.Punctuation, u'['), - (Token.Literal.Number.Integer, u'1'), - (Token.Punctuation, u','), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u','), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'3'), - (Token.Punctuation, u']'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testClosingCurly(self): - # This can be an Error token, but should not be a can't-pop-from-stack - # exception. - fragment = '}"a"\n' - tokens = [ - (Token.Error, '}'), - (Token.Name.Tag, '"a"'), - (Token.Text, '\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testClosingCurlyInValue(self): - fragment = '"": ""}\n' - tokens = [ - (Token.Name.Tag, '""'), - (Token.Punctuation, ':'), - (Token.Text, ' '), - (Token.Literal.String.Double, '""'), - (Token.Error, '}'), - (Token.Text, '\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - -class YamlTest(unittest.TestCase): - def setUp(self): - self.lexer = YamlLexer() - - def testColonInComment(self): - # Bug #1528: This previously parsed 'token # innocent' as a tag - fragment = u'here: token # innocent: comment\n' - tokens = [ - (Token.Name.Tag, u'here'), - (Token.Punctuation, u':'), - (Token.Text, u' '), - (Token.Literal.Scalar.Plain, u'token'), - (Token.Text, u' '), - (Token.Comment.Single, u'# innocent: comment'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py deleted file mode 100644 index e208403b..00000000 --- a/tests/test_examplefiles.py +++ /dev/null @@ -1,138 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Pygments tests with example files - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from __future__ import print_function - -import os -import pprint -import difflib -import pickle - -from pygments.lexers import get_lexer_for_filename, get_lexer_by_name -from pygments.token import Error -from pygments.util import ClassNotFound - -import support - -STORE_OUTPUT = False - -STATS = {} - -TESTDIR = os.path.dirname(__file__) - -# Jython generates a StackOverflowError for repetitions of the form (a|b)+, -# which are commonly used in string patterns, when matching more than about 1000 -# chars. These tests do not complete. 
See http://bugs.jython.org/issue1965 -BAD_FILES_FOR_JYTHON = ('Object.st', 'all.nit', 'genclass.clj', - 'ragel-cpp_rlscan') - -def test_example_files(): - global STATS - STATS = {} - outdir = os.path.join(TESTDIR, 'examplefiles', 'output') - if STORE_OUTPUT and not os.path.isdir(outdir): - os.makedirs(outdir) - for fn in os.listdir(os.path.join(TESTDIR, 'examplefiles')): - if fn.startswith('.') or fn.endswith('#'): - continue - - absfn = os.path.join(TESTDIR, 'examplefiles', fn) - if not os.path.isfile(absfn): - continue - - extension = os.getenv('TEST_EXT') - if extension and not absfn.endswith(extension): - continue - - print(absfn) - with open(absfn, 'rb') as f: - code = f.read() - try: - code = code.decode('utf-8') - except UnicodeError: - code = code.decode('latin1') - - lx = None - if '_' in fn: - try: - lx = get_lexer_by_name(fn.split('_')[0]) - except ClassNotFound: - pass - if lx is None: - try: - lx = get_lexer_for_filename(absfn, code=code) - except ClassNotFound: - raise AssertionError('file %r has no registered extension, ' - 'nor is of the form _filename ' - 'for overriding, thus no lexer found.' - % fn) - yield check_lexer, lx, fn - - N = 7 - stats = list(STATS.items()) - stats.sort(key=lambda x: x[1][1]) - print('\nExample files that took longest absolute time:') - for fn, t in stats[-N:]: - print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t)) - print() - stats.sort(key=lambda x: x[1][2]) - print('\nExample files that took longest relative time:') - for fn, t in stats[-N:]: - print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t)) - - -def check_lexer(lx, fn): - if os.name == 'java' and fn in BAD_FILES_FOR_JYTHON: - raise support.SkipTest('%s is a known bad file on Jython' % fn) - absfn = os.path.join(TESTDIR, 'examplefiles', fn) - with open(absfn, 'rb') as fp: - text = fp.read() - text = text.replace(b'\r\n', b'\n') - text = text.strip(b'\n') + b'\n' - try: - text = text.decode('utf-8') - if text.startswith(u'\ufeff'): - text = text[len(u'\ufeff'):] - except UnicodeError: - text = text.decode('latin1') - ntext = [] - tokens = [] - import time - t1 = time.time() - for type, val in lx.get_tokens(text): - ntext.append(val) - assert type != Error, \ - 'lexer %s generated error token for %s: %r at position %d' % \ - (lx, absfn, val, len(u''.join(ntext))) - tokens.append((type, val)) - t2 = time.time() - STATS[os.path.basename(absfn)] = (len(text), - 1000 * (t2 - t1), 1000 * (t2 - t1) / len(text)) - if u''.join(ntext) != text: - print('\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(), - text.splitlines()))) - raise AssertionError('round trip failed for ' + absfn) - - # check output against previous run if enabled - if STORE_OUTPUT: - # no previous output -- store it - outfn = os.path.join(TESTDIR, 'examplefiles', 'output', fn) - if not os.path.isfile(outfn): - with open(outfn, 'wb') as fp: - pickle.dump(tokens, fp) - return - # otherwise load it and compare - with open(outfn, 'rb') as fp: - stored_tokens = pickle.load(fp) - if stored_tokens != tokens: - f1 = pprint.pformat(stored_tokens) - f2 = pprint.pformat(tokens) - print('\n'.join(difflib.unified_diff(f1.splitlines(), - f2.splitlines()))) - assert False, absfn diff --git a/tests/test_ezhil.py b/tests/test_ezhil.py deleted file mode 100644 index 15cc13b1..00000000 --- a/tests/test_ezhil.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Basic EzhilLexer Test - ~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2015 Muthiah Annamalai - :license: BSD, see LICENSE for details. 
-""" - -import unittest - -from pygments.token import Operator, Number, Text, Token -from pygments.lexers import EzhilLexer - - -class EzhilTest(unittest.TestCase): - - def setUp(self): - self.lexer = EzhilLexer() - self.maxDiff = None - - def testSum(self): - fragment = u'1+3\n' - tokens = [ - (Number.Integer, u'1'), - (Operator, u'+'), - (Number.Integer, u'3'), - (Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testGCDExpr(self): - fragment = u'1^3+(5-5)*gcd(a,b)\n' - tokens = [ - (Token.Number.Integer,u'1'), - (Token.Operator,u'^'), - (Token.Literal.Number.Integer, u'3'), - (Token.Operator, u'+'), - (Token.Punctuation, u'('), - (Token.Literal.Number.Integer, u'5'), - (Token.Operator, u'-'), - (Token.Literal.Number.Integer, u'5'), - (Token.Punctuation, u')'), - (Token.Operator, u'*'), - (Token.Name, u'gcd'), - (Token.Punctuation, u'('), - (Token.Name, u'a'), - (Token.Operator, u','), - (Token.Name, u'b'), - (Token.Punctuation, u')'), - (Token.Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testIfStatement(self): - fragment = u"""@( 0 > 3 ) ஆனால் - பதிப்பி "wont print" -முடி""" - tokens = [ - (Token.Operator, u'@'), - (Token.Punctuation, u'('), - (Token.Text, u' '), - (Token.Literal.Number.Integer,u'0'), - (Token.Text, u' '), - (Token.Operator,u'>'), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'3'), - (Token.Text, u' '), - (Token.Punctuation, u')'), - (Token.Text, u' '), - (Token.Keyword, u'ஆனால்'), - (Token.Text, u'\n'), - (Token.Text, u'\t'), - (Token.Keyword, u'பதிப்பி'), - (Token.Text, u' '), - (Token.Literal.String, u'"wont print"'), - (Token.Text, u'\t'), - (Token.Text, u'\n'), - (Token.Keyword, u'முடி'), - (Token.Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - - def testFunction(self): - fragment = u"""# (C) முத்தையா அண்ணாமலை 2013, 2015 -நிரல்பாகம் gcd ( x, y ) - மு = max(x,y) - q = min(x,y) - - @( q == 0 ) ஆனால் - பின்கொடு மு - முடி - பின்கொடு gcd( மு - q , q ) -முடி\n""" - tokens = [ - (Token.Comment.Single, - u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85' - u'\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'), - (Token.Keyword,u'நிரல்பாகம்'), - (Token.Text, u' '), - (Token.Name, u'gcd'), - (Token.Text, u' '), - (Token.Punctuation, u'('), - (Token.Text, u' '), - (Token.Name, u'x'), - (Token.Operator, u','), - (Token.Text, u' '), - (Token.Name, u'y'), - (Token.Text, u' '), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Name, u'\u0bae\u0bc1'), - (Token.Text, u' '), - (Token.Operator, u'='), - (Token.Text, u' '), - (Token.Name.Builtin, u'max'), - (Token.Punctuation, u'('), - (Token.Name, u'x'), - (Token.Operator, u','), - (Token.Name, u'y'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Name, u'q'), - (Token.Text, u' '), - (Token.Operator, u'='), - (Token.Text, u' '), - (Token.Name.Builtin, u'min'), - (Token.Punctuation, u'('), - (Token.Name, u'x'), - (Token.Operator, u','), - (Token.Name, u'y'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Operator, u'@'), - (Token.Punctuation, u'('), - (Token.Text, u' '), - (Token.Name, u'q'), - (Token.Text, u' '), - (Token.Operator, u'=='), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'0'), - (Token.Text, u' '), - (Token.Punctuation, u')'), - (Token.Text, u' '), - (Token.Keyword, u'ஆனால்'), - (Token.Text, u'\n'), - (Token.Text, u' '), - 
(Token.Keyword, u'பின்கொடு'), - (Token.Text, u' '), - (Token.Name, u'\u0bae\u0bc1'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'முடி'), - (Token.Text, u'\n'), - (Token.Text, u' '), - (Token.Keyword, u'\u0baa\u0bbf\u0ba9\u0bcd\u0b95\u0bca\u0b9f\u0bc1'), - (Token.Text, u' '), - (Token.Name, u'gcd'), - (Token.Punctuation, u'('), - (Token.Text, u' '), - (Token.Name, u'\u0bae\u0bc1'), - (Token.Text, u' '), - (Token.Operator, u'-'), - (Token.Text, u' '), - (Token.Name, u'q'), - (Token.Text, u' '), - (Token.Operator, u','), - (Token.Text, u' '), - (Token.Name, u'q'), - (Token.Text, u' '), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - (Token.Keyword, u'முடி'), #u'\u0bae\u0bc1\u0b9f\u0bbf'), - (Token.Text, u'\n') - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) - -if __name__ == "__main__": - unittest.main() diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py deleted file mode 100644 index 37efd6f0..00000000 --- a/tests/test_html_formatter.py +++ /dev/null @@ -1,200 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Pygments HTML formatter tests - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from __future__ import print_function - -import io -import os -import re -import unittest -import tempfile -from os.path import join, dirname, isfile - -from pygments.util import StringIO -from pygments.lexers import PythonLexer -from pygments.formatters import HtmlFormatter, NullFormatter -from pygments.formatters.html import escape_html - -import support - -TESTFILE, TESTDIR = support.location(__file__) - -with io.open(TESTFILE, encoding='utf-8') as fp: - tokensource = list(PythonLexer().get_tokens(fp.read())) - - -class HtmlFormatterTest(unittest.TestCase): - def test_correct_output(self): - hfmt = HtmlFormatter(nowrap=True) - houtfile = StringIO() - hfmt.format(tokensource, houtfile) - - nfmt = NullFormatter() - noutfile = StringIO() - nfmt.format(tokensource, noutfile) - - stripped_html = re.sub('<.*?>', '', houtfile.getvalue()) - escaped_text = escape_html(noutfile.getvalue()) - self.assertEqual(stripped_html, escaped_text) - - def test_external_css(self): - # test correct behavior - # CSS should be in /tmp directory - fmt1 = HtmlFormatter(full=True, cssfile='fmt1.css', outencoding='utf-8') - # CSS should be in TESTDIR (TESTDIR is absolute) - fmt2 = HtmlFormatter(full=True, cssfile=join(TESTDIR, 'fmt2.css'), - outencoding='utf-8') - tfile = tempfile.NamedTemporaryFile(suffix='.html') - fmt1.format(tokensource, tfile) - try: - fmt2.format(tokensource, tfile) - self.assertTrue(isfile(join(TESTDIR, 'fmt2.css'))) - except IOError: - # test directory not writable - pass - tfile.close() - - self.assertTrue(isfile(join(dirname(tfile.name), 'fmt1.css'))) - os.unlink(join(dirname(tfile.name), 'fmt1.css')) - try: - os.unlink(join(TESTDIR, 'fmt2.css')) - except OSError: - pass - - def test_all_options(self): - def check(optdict): - outfile = StringIO() - fmt = HtmlFormatter(**optdict) - fmt.format(tokensource, outfile) - - for optdict in [ - dict(nowrap=True), - dict(linenos=True, full=True), - dict(linenos=True, linespans='L'), - dict(hl_lines=[1, 5, 10, 'xxx']), - dict(hl_lines=[1, 5, 10], noclasses=True), - ]: - check(optdict) - - for linenos in [False, 'table', 'inline']: - for noclasses in [False, True]: - for linenospecial in [0, 5]: - for anchorlinenos in [False, True]: - optdict = dict( - linenos=linenos, - noclasses=noclasses, - 
-                            linenospecial=linenospecial,
-                            anchorlinenos=anchorlinenos,
-                        )
-                        check(optdict)
-
-    def test_linenos(self):
-        optdict = dict(linenos=True)
-        outfile = StringIO()
-        fmt = HtmlFormatter(**optdict)
-        fmt.format(tokensource, outfile)
-        html = outfile.getvalue()
-        self.assertTrue(re.search(r"<pre>\s+1\s+2\s+3", html))
    -
    -    def test_linenos_with_startnum(self):
    -        optdict = dict(linenos=True, linenostart=5)
    -        outfile = StringIO()
    -        fmt = HtmlFormatter(**optdict)
    -        fmt.format(tokensource, outfile)
    -        html = outfile.getvalue()
-        self.assertTrue(re.search(r"<pre>\s+5\s+6\s+7", html))
    -
    -    def test_lineanchors(self):
    -        optdict = dict(lineanchors="foo")
    -        outfile = StringIO()
    -        fmt = HtmlFormatter(**optdict)
    -        fmt.format(tokensource, outfile)
    -        html = outfile.getvalue()
    -        self.assertTrue(re.search("
    ", html))
    -
    -    def test_lineanchors_with_startnum(self):
    -        optdict = dict(lineanchors="foo", linenostart=5)
    -        outfile = StringIO()
    -        fmt = HtmlFormatter(**optdict)
    -        fmt.format(tokensource, outfile)
    -        html = outfile.getvalue()
    -        self.assertTrue(re.search("
    ", html))
    -
    -    def test_valid_output(self):
    -        # test all available wrappers
    -        fmt = HtmlFormatter(full=True, linenos=True, noclasses=True,
    -                            outencoding='utf-8')
    -
    -        handle, pathname = tempfile.mkstemp('.html')
    -        with os.fdopen(handle, 'w+b') as tfile:
    -            fmt.format(tokensource, tfile)
    -        catname = os.path.join(TESTDIR, 'dtds', 'HTML4.soc')
    -        try:
    -            import subprocess
    -            po = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
    -                                  stdout=subprocess.PIPE)
    -            ret = po.wait()
    -            output = po.stdout.read()
    -            po.stdout.close()
    -        except OSError:
    -            # nsgmls not available
    -            pass
    -        else:
    -            if ret:
    -                print(output)
    -            self.assertFalse(ret, 'nsgmls run reported errors')
    -
    -        os.unlink(pathname)
    -
    -    def test_get_style_defs(self):
    -        fmt = HtmlFormatter()
    -        sd = fmt.get_style_defs()
    -        self.assertTrue(sd.startswith('.'))
    -
    -        fmt = HtmlFormatter(cssclass='foo')
    -        sd = fmt.get_style_defs()
    -        self.assertTrue(sd.startswith('.foo'))
    -        sd = fmt.get_style_defs('.bar')
    -        self.assertTrue(sd.startswith('.bar'))
    -        sd = fmt.get_style_defs(['.bar', '.baz'])
    -        fl = sd.splitlines()[0]
    -        self.assertTrue('.bar' in fl and '.baz' in fl)
    -
    -    def test_unicode_options(self):
    -        fmt = HtmlFormatter(title=u'Föö',
    -                            cssclass=u'bär',
    -                            cssstyles=u'div:before { content: \'bäz\' }',
    -                            encoding='utf-8')
    -        handle, pathname = tempfile.mkstemp('.html')
    -        with os.fdopen(handle, 'w+b') as tfile:
    -            fmt.format(tokensource, tfile)
    -
    -    def test_ctags(self):
    -        try:
    -            import ctags
    -        except ImportError:
    -            # we can't check without the ctags module, but at least check the exception
    -            self.assertRaises(RuntimeError, HtmlFormatter, tagsfile='support/tags')
    -        else:
    -            # this tagfile says that test_ctags() is on line 165, even if it isn't
    -            # anymore in the actual source
    -            fmt = HtmlFormatter(tagsfile='support/tags', lineanchors='L',
    -                                tagurlformat='%(fname)s%(fext)s')
    -            outfile = StringIO()
    -            fmt.format(tokensource, outfile)
    -            self.assertTrue('test_ctags'
    -                            in outfile.getvalue())
    -
    -    def test_filename(self):
    -        optdict = dict(filename="test.py")
    -        outfile = StringIO()
    -        fmt = HtmlFormatter(**optdict)
    -        fmt.format(tokensource, outfile)
    -        html = outfile.getvalue()
-        self.assertTrue(re.search("<span class=\"filename\">test.py</span>", html))
    diff --git a/tests/test_inherit.py b/tests/test_inherit.py
    deleted file mode 100644
    index 38acf328..00000000
    --- a/tests/test_inherit.py
    +++ /dev/null
    @@ -1,94 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Tests for inheritance in RegexLexer
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexer import RegexLexer, inherit
    -from pygments.token import Text
    -
    -
    -class InheritTest(unittest.TestCase):
    -    def test_single_inheritance_position(self):
    -        t = Two()
    -        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    -        self.assertEqual(['x', 'a', 'b', 'y'], pats)
    -    def test_multi_inheritance_beginning(self):
    -        t = Beginning()
    -        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    -        self.assertEqual(['x', 'a', 'b', 'y', 'm'], pats)
    -    def test_multi_inheritance_end(self):
    -        t = End()
    -        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    -        self.assertEqual(['m', 'x', 'a', 'b', 'y'], pats)
    -
    -    def test_multi_inheritance_position(self):
    -        t = Three()
    -        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    -        self.assertEqual(['i', 'x', 'a', 'b', 'y', 'j'], pats)
    -
    -    def test_single_inheritance_with_skip(self):
    -        t = Skipped()
    -        pats = [x[0].__self__.pattern for x in t._tokens['root']]
    -        self.assertEqual(['x', 'a', 'b', 'y'], pats)
    -
    -
    -class One(RegexLexer):
    -    tokens = {
    -        'root': [
    -            ('a', Text),
    -            ('b', Text),
    -        ],
    -    }
    -
    -class Two(One):
    -    tokens = {
    -        'root': [
    -            ('x', Text),
    -            inherit,
    -            ('y', Text),
    -        ],
    -    }
    -
    -class Three(Two):
    -    tokens = {
    -        'root': [
    -            ('i', Text),
    -            inherit,
    -            ('j', Text),
    -        ],
    -    }
    -
    -class Beginning(Two):
    -    tokens = {
    -        'root': [
    -            inherit,
    -            ('m', Text),
    -        ],
    -    }
    -
    -class End(Two):
    -    tokens = {
    -        'root': [
    -            ('m', Text),
    -            inherit,
    -        ],
    -    }
    -
    -class Empty(One):
    -    tokens = {}
    -
    -class Skipped(Empty):
    -    tokens = {
    -        'root': [
    -            ('x', Text),
    -            inherit,
    -            ('y', Text),
    -        ],
    -    }
    -
    diff --git a/tests/test_irc_formatter.py b/tests/test_irc_formatter.py
    deleted file mode 100644
    index 18bcd58b..00000000
    --- a/tests/test_irc_formatter.py
    +++ /dev/null
    @@ -1,30 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments IRC formatter tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -from __future__ import print_function
    -
    -import re
    -import unittest
    -
    -from pygments.util import StringIO
    -from pygments.lexers import PythonLexer
    -from pygments.formatters import IRCFormatter
    -
    -import support
    -
    -tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
    -
    -class IRCFormatterTest(unittest.TestCase):
    -    def test_correct_output(self):
    -        hfmt = IRCFormatter()
    -        houtfile = StringIO()
    -        hfmt.format(tokensource, houtfile)
    -
    -        self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
    -
    diff --git a/tests/test_java.py b/tests/test_java.py
    deleted file mode 100644
    index 5f520853..00000000
    --- a/tests/test_java.py
    +++ /dev/null
    @@ -1,78 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Basic JavaLexer Test
    -    ~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Text, Name, Operator, Keyword, Number
    -from pygments.lexers import JavaLexer
    -
    -
    -class JavaTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = JavaLexer()
    -        self.maxDiff = None
    -
    -    def testEnhancedFor(self):
    -        fragment = u'label:\nfor(String var2: var1) {}\n'
    -        tokens = [
    -            (Name.Label, u'label:'),
    -            (Text, u'\n'),
    -            (Keyword, u'for'),
    -            (Operator, u'('),
    -            (Name, u'String'),
    -            (Text, u' '),
    -            (Name, u'var2'),
    -            (Operator, u':'),
    -            (Text, u' '),
    -            (Name, u'var1'),
    -            (Operator, u')'),
    -            (Text, u' '),
    -            (Operator, u'{'),
    -            (Operator, u'}'),
    -            (Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testNumericLiterals(self):
    -        fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
    -        fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
    -        tokens = [
    -            (Number.Integer, '0'),
    -            (Text, ' '),
    -            (Number.Integer, '5L'),
    -            (Text, ' '),
    -            (Number.Integer, '9__542_72l'),
    -            (Text, ' '),
    -            (Number.Hex, '0xbEEf'),
    -            (Text, ' '),
    -            (Number.Hex, '0X9_A'),
    -            (Text, ' '),
    -            (Number.Oct, '0_35'),
    -            (Text, ' '),
    -            (Number.Oct, '01'),
    -            (Text, ' '),
    -            (Number.Bin, '0b0___101_0'),
    -            (Text, ' '),
    -            (Number.Float, '0.'),
    -            (Text, ' '),
    -            (Number.Float, '.7_17F'),
    -            (Text, ' '),
    -            (Number.Float, '3e-1_3d'),
    -            (Text, ' '),
    -            (Number.Float, '1f'),
    -            (Text, ' '),
    -            (Number.Float, '6_01.9e+3'),
    -            (Text, ' '),
    -            (Number.Float, '0x.1Fp3'),
    -            (Text, ' '),
    -            (Number.Float, '0XEP8D'),
    -            (Text, '\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_javascript.py b/tests/test_javascript.py
    deleted file mode 100644
    index a2dfb7e1..00000000
    --- a/tests/test_javascript.py
    +++ /dev/null
    @@ -1,84 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Javascript tests
    -    ~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers import CoffeeScriptLexer
    -from pygments.token import Token
    -
    -COFFEE_SLASH_GOLDEN = [
    -    # input_str, slashes_are_regex_here
    -    (r'/\\/', True),
    -    (r'/\\/i', True),
    -    (r'/\//', True),
    -    (r'/(\s)/', True),
    -    ('/a{2,8}/', True),
    -    ('/b*c?d+/', True),
    -    ('/(capture-match)/', True),
    -    ('/(?:do-not-capture-match)/', True),
    -    ('/this|or|that/', True),
    -    ('/[char-set]/', True),
    -    ('/[^neg-char_st]/', True),
    -    ('/^.*$/', True),
    -    (r'/\n(\f)\0\1\d\b\cm\u1234/', True),
    -    (r'/^.?([^/\\\n\w]*)a\1+$/.something(or_other) # something more complex', True),
    -    ("foo = (str) ->\n  /'|\"/.test str", True),
    -    ('a = a / b / c', False),
    -    ('a = a/b/c', False),
    -    ('a = a/b/ c', False),
    -    ('a = a /b/c', False),
    -    ('a = 1 + /d/.test(a)', True),
    -]
    -
    -def test_coffee_slashes():
    -    for input_str, slashes_are_regex_here in COFFEE_SLASH_GOLDEN:
    -        yield coffee_runner, input_str, slashes_are_regex_here
    -
    -def coffee_runner(input_str, slashes_are_regex_here):
    -    lex = CoffeeScriptLexer()
    -    output = list(lex.get_tokens(input_str))
    -    print(output)
    -    for t, s in output:
    -        if '/' in s:
    -            is_regex = t is Token.String.Regex
    -            assert is_regex == slashes_are_regex_here, (t, s)
    -
    -class CoffeeTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = CoffeeScriptLexer()
    -
    -    def testMixedSlashes(self):
    -        fragment = u'a?/foo/:1/2;\n'
    -        tokens = [
    -            (Token.Name.Other, u'a'),
    -            (Token.Operator, u'?'),
    -            (Token.Literal.String.Regex, u'/foo/'),
    -            (Token.Operator, u':'),
    -            (Token.Literal.Number.Integer, u'1'),
    -            (Token.Operator, u'/'),
    -            (Token.Literal.Number.Integer, u'2'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testBewareInfiniteLoop(self):
    -        # This demonstrates the case that "This isn't really guarding" comment
    -        # refers to.
    -        fragment = '/a/x;\n'
    -        tokens = [
    -            (Token.Text, ''),
    -            (Token.Operator, '/'),
    -            (Token.Name.Other, 'a'),
    -            (Token.Operator, '/'),
    -            (Token.Name.Other, 'x'),
    -            (Token.Punctuation, ';'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_julia.py b/tests/test_julia.py
    deleted file mode 100644
    index eda04b1c..00000000
    --- a/tests/test_julia.py
    +++ /dev/null
    @@ -1,58 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Julia Tests
    -    ~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers import JuliaLexer
    -from pygments.token import Token
    -
    -
    -class JuliaTests(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = JuliaLexer()
    -
    -    def test_unicode(self):
    -        """
-        Test that the unicode character √ in an expression is recognized
    -        """
    -        fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
    -        tokens = [
    -            (Token.Name, u's'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'='),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'\u221a'),
    -            (Token.Punctuation, u'('),
    -            (Token.Punctuation, u'('),
    -            (Token.Literal.Number.Integer, u'1'),
    -            (Token.Operator, u'/'),
    -            (Token.Name, u'n'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'*'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'sum'),
    -            (Token.Punctuation, u'('),
    -            (Token.Name, u'count'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'.^'),
    -            (Token.Text, u' '),
    -            (Token.Literal.Number.Integer, u'2'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'-'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'mu'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'.^'),
    -            (Token.Literal.Number.Integer, u'2'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_kotlin.py b/tests/test_kotlin.py
    deleted file mode 100644
    index 417d0d9f..00000000
    --- a/tests/test_kotlin.py
    +++ /dev/null
    @@ -1,131 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
-    Basic KotlinLexer Test
-    ~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Text, Name, Operator, Keyword, Number, Punctuation, String
    -from pygments.lexers import KotlinLexer
    -
    -class KotlinTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = KotlinLexer()
    -        self.maxDiff = None
    -    
    -    def testCanCopeWithBackTickNamesInFunctions(self):
    -        fragment = u'fun `wo bble`'
    -        tokens = [
    -            (Keyword, u'fun'),
    -            (Text, u' '),
    -            (Name.Function, u'`wo bble`'),
    -            (Text, u'\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testCanCopeWithCommasAndDashesInBackTickNames(self):
    -        fragment = u'fun `wo,-bble`'
    -        tokens = [
    -            (Keyword, u'fun'),
    -            (Text, u' '),
    -            (Name.Function, u'`wo,-bble`'),
    -            (Text, u'\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -    
    -    def testCanCopeWithDestructuring(self):
    -        fragment = u'val (a, b) = '
    -        tokens = [
    -            (Keyword, u'val'),
    -            (Text, u' '),
    -            (Punctuation, u'('),
    -            (Name.Property, u'a'),
    -            (Punctuation, u','),
    -            (Text, u' '),
    -            (Name.Property, u'b'),
    -            (Punctuation, u')'),
    -            (Text, u' '),
    -            (Punctuation, u'='),
    -            (Text, u' '),
    -            (Text, u'\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -    
    -    def testCanCopeGenericsInDestructuring(self):
-        fragment = u'val (a: List<Something>, b: Set<Wobble>) ='
    -        tokens = [
    -            (Keyword, u'val'),
    -            (Text, u' '),
    -            (Punctuation, u'('),
    -            (Name.Property, u'a'),
    -            (Punctuation, u':'),
    -            (Text, u' '),
    -            (Name.Property, u'List'),
    -            (Punctuation, u'<'),
    -            (Name, u'Something'),
    -            (Punctuation, u'>'),
    -            (Punctuation, u','),
    -            (Text, u' '),
    -            (Name.Property, u'b'),
    -            (Punctuation, u':'),
    -            (Text, u' '),
    -            (Name.Property, u'Set'),
    -            (Punctuation, u'<'),
    -            (Name, u'Wobble'),
    -            (Punctuation, u'>'),
    -            (Punctuation, u')'),
    -            (Text, u' '),
    -            (Punctuation, u'='),
    -            (Text, u'\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testCanCopeWithGenerics(self):
-        fragment = u'inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {'
    -        tokens = [
    -            (Keyword, u'inline fun'),
    -            (Text, u' '),
    -            (Punctuation, u'<'),
    -            (Keyword, u'reified'),
    -            (Text, u' '),
    -            (Name, u'T'),
    -            (Text, u' '),
    -            (Punctuation, u':'),
    -            (Text, u' '),
    -            (Name, u'ContractState'),
    -            (Punctuation, u'>'),
    -            (Text, u' '),
    -            (Name.Class, u'VaultService'),
    -            (Punctuation, u'.'),
    -            (Name.Function, u'queryBy'),
    -            (Punctuation, u'('),
    -            (Punctuation, u')'),
    -            (Punctuation, u':'),
    -            (Text, u' '),
    -            (Name, u'Vault'),
    -            (Punctuation, u'.'),
    -            (Name, u'Page'),
    -            (Punctuation, u'<'),
    -            (Name, u'T'),
    -            (Punctuation, u'>'),
    -            (Text, u' '),
    -            (Punctuation, u'{'),
    -            (Text, u'\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testShouldCopeWithMultilineComments(self):
    -        fragment = u'"""\nthis\nis\na\ncomment"""'
    -        tokens = [
    -            (String, u'"""\nthis\nis\na\ncomment"""'), 
    -            (Text, u'\n')
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -if __name__ == '__main__':
    -    unittest.main()
    diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
    deleted file mode 100644
    index aa4ac3bb..00000000
    --- a/tests/test_latex_formatter.py
    +++ /dev/null
    @@ -1,54 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments LaTeX formatter tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -from __future__ import print_function
    -
    -import os
    -import unittest
    -import tempfile
    -
    -from pygments.formatters import LatexFormatter
    -from pygments.lexers import PythonLexer
    -
    -import support
    -
    -TESTFILE, TESTDIR = support.location(__file__)
    -
    -
    -class LatexFormatterTest(unittest.TestCase):
    -
    -    def test_valid_output(self):
    -        with open(TESTFILE) as fp:
    -            tokensource = list(PythonLexer().get_tokens(fp.read()))
    -        fmt = LatexFormatter(full=True, encoding='latin1')
    -
    -        handle, pathname = tempfile.mkstemp('.tex')
    -        # place all output files in /tmp too
    -        old_wd = os.getcwd()
    -        os.chdir(os.path.dirname(pathname))
    -        tfile = os.fdopen(handle, 'wb')
    -        fmt.format(tokensource, tfile)
    -        tfile.close()
    -        try:
    -            import subprocess
    -            po = subprocess.Popen(['latex', '-interaction=nonstopmode',
    -                                   pathname], stdout=subprocess.PIPE)
    -            ret = po.wait()
    -            output = po.stdout.read()
    -            po.stdout.close()
    -        except OSError as e:
    -            # latex not available
    -            raise support.SkipTest(e)
    -        else:
    -            if ret:
    -                print(output)
    -            self.assertFalse(ret, 'latex run reported errors')
    -
    -        os.unlink(pathname)
    -        os.chdir(old_wd)
    diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
    deleted file mode 100644
    index 8d53c542..00000000
    --- a/tests/test_lexers_other.py
    +++ /dev/null
    @@ -1,80 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Tests for other lexers
    -    ~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -import glob
    -import os
    -import unittest
    -
    -from pygments.lexers import guess_lexer
    -from pygments.lexers.scripting import EasytrieveLexer, JclLexer, RexxLexer
    -
    -
    -def _exampleFilePath(filename):
    -    return os.path.join(os.path.dirname(__file__), 'examplefiles', filename)
    -
    -
    -class AnalyseTextTest(unittest.TestCase):
    -    def _testCanRecognizeAndGuessExampleFiles(self, lexer):
    -        assert lexer is not None
    -
    -        for pattern in lexer.filenames:
    -            exampleFilesPattern = _exampleFilePath(pattern)
    -            for exampleFilePath in glob.glob(exampleFilesPattern):
    -                with open(exampleFilePath, 'rb') as fp:
    -                    text = fp.read().decode('utf-8')
    -                probability = lexer.analyse_text(text)
    -                self.assertTrue(probability > 0,
    -                                '%s must recognize %r' % (
    -                                    lexer.name, exampleFilePath))
    -                guessedLexer = guess_lexer(text)
    -                self.assertEqual(guessedLexer.name, lexer.name)
    -
    -    def testCanRecognizeAndGuessExampleFiles(self):
    -        LEXERS_TO_TEST = [
    -            EasytrieveLexer,
    -            JclLexer,
    -            RexxLexer,
    -        ]
    -        for lexerToTest in LEXERS_TO_TEST:
    -            self._testCanRecognizeAndGuessExampleFiles(lexerToTest)
    -
    -
    -class EasyTrieveLexerTest(unittest.TestCase):
    -    def testCanGuessFromText(self):
    -        self.assertTrue(EasytrieveLexer.analyse_text('MACRO'))
    -        self.assertTrue(EasytrieveLexer.analyse_text('\nMACRO'))
    -        self.assertTrue(EasytrieveLexer.analyse_text(' \nMACRO'))
    -        self.assertTrue(EasytrieveLexer.analyse_text(' \n MACRO'))
    -        self.assertTrue(EasytrieveLexer.analyse_text('*\nMACRO'))
    -        self.assertTrue(EasytrieveLexer.analyse_text(
    -            '*\n *\n\n \n*\n MACRO'))
    -
    -
    -class RexxLexerTest(unittest.TestCase):
    -    def testCanGuessFromText(self):
    -        self.assertAlmostEqual(0.01, RexxLexer.analyse_text('/* */'))
    -        self.assertAlmostEqual(1.0,
    -                               RexxLexer.analyse_text('''/* Rexx */
    -                say "hello world"'''))
    -        val = RexxLexer.analyse_text('/* */\n'
    -                                     'hello:pRoceduRe\n'
    -                                     '  say "hello world"')
    -        self.assertTrue(val > 0.5, val)
    -        val = RexxLexer.analyse_text('''/* */
    -                if 1 > 0 then do
    -                    say "ok"
    -                end
    -                else do
    -                    say "huh?"
    -                end''')
    -        self.assertTrue(val > 0.2, val)
    -        val = RexxLexer.analyse_text('''/* */
    -                greeting = "hello world!"
    -                parse value greeting "hello" name "!"
    -                say name''')
    -        self.assertTrue(val > 0.2, val)
    diff --git a/tests/test_markdown_lexer.py b/tests/test_markdown_lexer.py
    deleted file mode 100644
    index c143586a..00000000
    --- a/tests/test_markdown_lexer.py
    +++ /dev/null
    @@ -1,31 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
-    Pygments Markdown lexer tests
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -import unittest
    -
    -from pygments.lexers.markup import MarkdownLexer
    -
    -
    -class SameTextTests(unittest.TestCase):
    -
    -    lexer = MarkdownLexer()
    -
    -    def assert_same_text(self, text):
    -        """Show that lexed markdown does not remove any content. """
    -        tokens = list(self.lexer.get_tokens_unprocessed(text))
    -        output = ''.join(t[2] for t in tokens)
    -        self.assertEqual(text, output)
    -
    -    def test_code_fence(self):
    -        self.assert_same_text(r'```\nfoo\n```\n')
    -
    -    def test_code_fence_gsm(self):
    -        self.assert_same_text(r'```markdown\nfoo\n```\n')
    -
    -    def test_code_fence_gsm_with_no_lexer(self):
    -        self.assert_same_text(r'```invalid-lexer\nfoo\n```\n')
    diff --git a/tests/test_modeline.py b/tests/test_modeline.py
    deleted file mode 100644
    index 6e1f16a4..00000000
    --- a/tests/test_modeline.py
    +++ /dev/null
    @@ -1,26 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Tests for the vim modeline feature
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -from __future__ import print_function
    -
    -from pygments import modeline
    -
    -
    -def test_lexer_classes():
    -    def verify(buf):
    -        assert modeline.get_filetype_from_buffer(buf) == 'python'
    -
    -    for buf in [
    -            'vi: ft=python' + '\n' * 8,
    -            'vi: ft=python' + '\n' * 8,
    -            '\n\n\n\nvi=8: syntax=python' + '\n' * 8,
    -            '\n' * 8 + 'ex: filetype=python',
    -            '\n' * 8 + 'vim: some,other,syn=python\n\n\n\n'
    -    ]:
    -        yield verify, buf
    diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
    deleted file mode 100644
    index 3db6a9e0..00000000
    --- a/tests/test_objectiveclexer.py
    +++ /dev/null
    @@ -1,92 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
-    Basic ObjectiveCLexer Test
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -import os
    -
    -from pygments.token import Token
    -from pygments.lexers import ObjectiveCLexer
    -
    -
    -class ObjectiveCLexerTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = ObjectiveCLexer()
    -
    -    def testLiteralNumberInt(self):
    -        fragment = u'@(1);\n'
    -        expected = [
    -            (Token.Literal, u'@('),
    -            (Token.Literal.Number.Integer, u'1'),
    -            (Token.Literal, u')'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    -
    -    def testLiteralNumberExpression(self):
    -        fragment = u'@(1+2);\n'
    -        expected = [
    -            (Token.Literal, u'@('),
    -            (Token.Literal.Number.Integer, u'1'),
    -            (Token.Operator, u'+'),
    -            (Token.Literal.Number.Integer, u'2'),
    -            (Token.Literal, u')'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    -
    -    def testLiteralNumberNestedExpression(self):
    -        fragment = u'@(1+(2+3));\n'
    -        expected = [
    -            (Token.Literal, u'@('),
    -            (Token.Literal.Number.Integer, u'1'),
    -            (Token.Operator, u'+'),
    -            (Token.Punctuation, u'('),
    -            (Token.Literal.Number.Integer, u'2'),
    -            (Token.Operator, u'+'),
    -            (Token.Literal.Number.Integer, u'3'),
    -            (Token.Punctuation, u')'),
    -            (Token.Literal, u')'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    -
    -    def testLiteralNumberBool(self):
    -        fragment = u'@NO;\n'
    -        expected = [
    -            (Token.Literal.Number, u'@NO'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    -
    -    def testLiteralNumberBoolExpression(self):
    -        fragment = u'@(YES);\n'
    -        expected = [
    -            (Token.Literal, u'@('),
    -            (Token.Name.Builtin, u'YES'),
    -            (Token.Literal, u')'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    -
    -    def testModuleImport(self):
    -        fragment = u'@import ModuleA;\n'
    -        expected = [
    -            (Token.Keyword, u'@import'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'ModuleA'),
    -            (Token.Punctuation, u';'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
    deleted file mode 100644
    index 30f9eca8..00000000
    --- a/tests/test_perllexer.py
    +++ /dev/null
    @@ -1,157 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments regex lexer tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import time
    -import unittest
    -
    -from pygments.token import Keyword, Name, String, Text
    -from pygments.lexers.perl import PerlLexer
    -
    -
    -class RunawayRegexTest(unittest.TestCase):
    -    # A previous version of the Perl lexer would spend a great deal of
    -    # time backtracking when given particular strings.  These tests show that
    -    # the runaway backtracking doesn't happen any more (at least for the given
    -    # cases).
    -
    -    lexer = PerlLexer()
    -
    -    ### Test helpers.
    -
    -    def assert_single_token(self, s, token):
    -        """Show that a given string generates only one token."""
    -        tokens = list(self.lexer.get_tokens_unprocessed(s))
    -        self.assertEqual(len(tokens), 1, tokens)
    -        self.assertEqual(s, tokens[0][2])
    -        self.assertEqual(token, tokens[0][1])
    -
    -    def assert_tokens(self, strings, expected_tokens):
    -        """Show that a given string generates the expected tokens."""
    -        tokens = list(self.lexer.get_tokens_unprocessed(''.join(strings)))
    -        self.assertEqual(len(tokens), len(expected_tokens), tokens)
    -        for index, s in enumerate(strings):
    -            self.assertEqual(s, tokens[index][2])
    -            self.assertEqual(expected_tokens[index], tokens[index][1])
    -
    -    def assert_fast_tokenization(self, s):
    -        """Show that a given string is tokenized quickly."""
    -        start = time.time()
    -        tokens = list(self.lexer.get_tokens_unprocessed(s))
    -        end = time.time()
    -        # Isn't 10 seconds kind of a long time?  Yes, but we don't want false
    -        # positives when the tests are starved for CPU time.
    -        if end-start > 10:
    -            self.fail('tokenization took too long')
    -        return tokens
    -
    -    ### Strings.
    -
    -    def test_single_quote_strings(self):
    -        self.assert_single_token(r"'foo\tbar\\\'baz'", String)
    -        self.assert_fast_tokenization("'" + '\\'*999)
    -
    -    def test_double_quote_strings(self):
    -        self.assert_single_token(r'"foo\tbar\\\"baz"', String)
    -        self.assert_fast_tokenization('"' + '\\'*999)
    -
    -    def test_backtick_strings(self):
    -        self.assert_single_token(r'`foo\tbar\\\`baz`', String.Backtick)
    -        self.assert_fast_tokenization('`' + '\\'*999)
    -
    -    ### Regex matches with various delimiters.
    -
    -    def test_match(self):
    -        self.assert_single_token(r'/aa\tbb/', String.Regex)
    -        self.assert_fast_tokenization('/' + '\\'*999)
    -
    -    def test_match_with_slash(self):
    -        self.assert_tokens(['m', '/\n\\t\\\\/'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m/xxx\n' + '\\'*999)
    -
    -    def test_match_with_bang(self):
    -        self.assert_tokens(['m', r'!aa\t\!bb!'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m!' + '\\'*999)
    -
    -    def test_match_with_brace(self):
    -        self.assert_tokens(['m', r'{aa\t\}bb}'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m{' + '\\'*999)
    -
    -    def test_match_with_angle_brackets(self):
-        self.assert_tokens(['m', r'<aa\t\>bb>'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m<' + '\\'*999)
    -
    -    def test_match_with_parenthesis(self):
    -        self.assert_tokens(['m', r'(aa\t\)bb)'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m(' + '\\'*999)
    -
    -    def test_match_with_at_sign(self):
    -        self.assert_tokens(['m', r'@aa\t\@bb@'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m@' + '\\'*999)
    -
    -    def test_match_with_percent_sign(self):
    -        self.assert_tokens(['m', r'%aa\t\%bb%'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m%' + '\\'*999)
    -
    -    def test_match_with_dollar_sign(self):
    -        self.assert_tokens(['m', r'$aa\t\$bb$'], [String.Regex, String.Regex])
    -        self.assert_fast_tokenization('m$' + '\\'*999)
    -
-    ### Regex substitutions with various delimiters.
    -
    -    def test_substitution_with_slash(self):
    -        self.assert_single_token('s/aaa/bbb/g', String.Regex)
    -        self.assert_fast_tokenization('s/foo/' + '\\'*999)
    -
    -    def test_substitution_with_at_sign(self):
    -        self.assert_single_token(r's@aaa@bbb@g', String.Regex)
    -        self.assert_fast_tokenization('s@foo@' + '\\'*999)
    -
    -    def test_substitution_with_percent_sign(self):
    -        self.assert_single_token(r's%aaa%bbb%g', String.Regex)
    -        self.assert_fast_tokenization('s%foo%' + '\\'*999)
    -
    -    def test_substitution_with_brace(self):
    -        self.assert_single_token(r's{aaa}', String.Regex)
    -        self.assert_fast_tokenization('s{' + '\\'*999)
    -
    -    def test_substitution_with_angle_bracket(self):
-        self.assert_single_token(r's<aaa>', String.Regex)
    -        self.assert_fast_tokenization('s<' + '\\'*999)
    -
    -    def test_substitution_with_angle_bracket(self):
-        self.assert_single_token(r's<aaa>', String.Regex)
    -        self.assert_fast_tokenization('s<' + '\\'*999)
    -
    -    def test_substitution_with_square_bracket(self):
    -        self.assert_single_token(r's[aaa]', String.Regex)
    -        self.assert_fast_tokenization('s[' + '\\'*999)
    -
    -    def test_substitution_with_parenthesis(self):
    -        self.assert_single_token(r's(aaa)', String.Regex)
    -        self.assert_fast_tokenization('s(' + '\\'*999)
    -
    -    ### Namespaces/modules
    -
    -    def test_package_statement(self):
    -        self.assert_tokens(['package', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
    -        self.assert_tokens(['package', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
    -
    -    def test_use_statement(self):
    -        self.assert_tokens(['use', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
    -        self.assert_tokens(['use', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
    -
    -    def test_no_statement(self):
    -        self.assert_tokens(['no', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
    -        self.assert_tokens(['no', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
    -
    -    def test_require_statement(self):
    -        self.assert_tokens(['require', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
    -        self.assert_tokens(['require', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
    -        self.assert_tokens(['require', ' ', '"Foo/Bar.pm"'], [Keyword, Text, String])
    -
    diff --git a/tests/test_php.py b/tests/test_php.py
    deleted file mode 100644
    index bb047b91..00000000
    --- a/tests/test_php.py
    +++ /dev/null
    @@ -1,36 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    PHP Tests
    -    ~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers import PhpLexer
    -from pygments.token import Token
    -
    -
    -class PhpTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = PhpLexer()
    -
    -    def testStringEscapingRun(self):
    -        fragment = '\n'
    -        tokens = [
    -            (Token.Comment.Preproc, ''),
    -            (Token.Other, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_praat.py b/tests/test_praat.py
    deleted file mode 100644
    index 9bf3ce76..00000000
    --- a/tests/test_praat.py
    +++ /dev/null
    @@ -1,130 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Praat lexer tests
    -    ~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Token
    -from pygments.lexers import PraatLexer
    -
    -class PraatTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = PraatLexer()
    -        self.maxDiff = None
    -
    -    def testNumericAssignment(self):
    -        fragment = u'var = -15e4\n'
    -        tokens = [
    -            (Token.Text, u'var'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'='),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'-'),
    -            (Token.Literal.Number, u'15e4'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testStringAssignment(self):
    -        fragment = u'var$ = "foo"\n'
    -        tokens = [
    -            (Token.Text, u'var$'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'='),
    -            (Token.Text, u' '),
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'foo'),
    -            (Token.Literal.String, u'"'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testStringEscapedQuotes(self):
    -        fragment = u'"it said ""foo"""\n'
    -        tokens = [
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'it said '),
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'foo'),
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'"'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testFunctionCall(self):
    -        fragment = u'selected("Sound", i+(a*b))\n'
    -        tokens = [
    -            (Token.Name.Function, u'selected'),
    -            (Token.Punctuation, u'('),
    -            (Token.Literal.String, u'"'),
    -            (Token.Literal.String, u'Sound'),
    -            (Token.Literal.String, u'"'),
    -            (Token.Punctuation, u','),
    -            (Token.Text, u' '),
    -            (Token.Text, u'i'),
    -            (Token.Operator, u'+'),
    -            (Token.Text, u'('),
    -            (Token.Text, u'a'),
    -            (Token.Operator, u'*'),
    -            (Token.Text, u'b'),
    -            (Token.Text, u')'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testBrokenUnquotedString(self):
    -        fragment = u'printline string\n... \'interpolated\' string\n'
    -        tokens = [
    -            (Token.Keyword, u'printline'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String, u'string'),
    -            (Token.Text, u'\n'),
    -            (Token.Punctuation, u'...'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Interpol, u"'"),
    -            (Token.Literal.String.Interpol, u'interpolated'),
    -            (Token.Literal.String.Interpol, u"'"),
    -            (Token.Text, u' '),
    -            (Token.Literal.String, u'string'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
-    def testInlineIf(self):
    -        fragment = u'var = if true == 1 then -1 else 0 fi'
    -        tokens = [
    -            (Token.Text, u'var'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'='),
    -            (Token.Text, u' '),
    -            (Token.Keyword, u'if'),
    -            (Token.Text, u' '),
    -            (Token.Text, u'true'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'=='),
    -            (Token.Text, u' '),
    -            (Token.Literal.Number, u'1'),
    -            (Token.Text, u' '),
    -            (Token.Keyword, u'then'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'-'),
    -            (Token.Literal.Number, u'1'),
    -            (Token.Text, u' '),
    -            (Token.Keyword, u'else'),
    -            (Token.Text, u' '),
    -            (Token.Literal.Number, u'0'),
    -            (Token.Text, u' '),
    -            (Token.Keyword, u'fi'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_properties.py b/tests/test_properties.py
    deleted file mode 100644
    index aaa8ce29..00000000
    --- a/tests/test_properties.py
    +++ /dev/null
    @@ -1,89 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Properties Tests
    -    ~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers.configs import PropertiesLexer
    -from pygments.token import Token
    -
    -
    -class PropertiesTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = PropertiesLexer()
    -
    -    def test_comments(self):
    -        """
-        Ensures that lines led by either # or ! are recognized as comments
    -        """
    -        fragment = '! a comment\n# also a comment\n'
    -        tokens = [
    -            (Token.Comment, '! a comment'),
    -            (Token.Text, '\n'),
    -            (Token.Comment, '# also a comment'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_leading_whitespace_comments(self):
    -        fragment = '    # comment\n'
    -        tokens = [
    -            (Token.Text, '    '),
    -            (Token.Comment, '# comment'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_escaped_space_in_key(self):
    -        fragment = 'key = value\n'
    -        tokens = [
    -            (Token.Name.Attribute, 'key'),
    -            (Token.Text, ' '),
    -            (Token.Operator, '='),
    -            (Token.Text, ' '),
    -            (Token.Literal.String, 'value'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_escaped_space_in_value(self):
    -        fragment = 'key = doubleword\\ value\n'
    -        tokens = [
    -            (Token.Name.Attribute, 'key'),
    -            (Token.Text, ' '),
    -            (Token.Operator, '='),
    -            (Token.Text, ' '),
    -            (Token.Literal.String, 'doubleword\\ value'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_space_delimited_kv_pair(self):
    -        fragment = 'key value\n'
    -        tokens = [
    -            (Token.Name.Attribute, 'key'),
    -            (Token.Text, ' '),
    -            (Token.Literal.String, 'value\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_just_key(self):
    -        fragment = 'justkey\n'
    -        tokens = [
    -            (Token.Name.Attribute, 'justkey'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_just_key_with_space(self):
    -        fragment = 'just\\ key\n'
    -        tokens = [
    -            (Token.Name.Attribute, 'just\\ key'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_python.py b/tests/test_python.py
    deleted file mode 100644
    index b9c6c49b..00000000
    --- a/tests/test_python.py
    +++ /dev/null
    @@ -1,133 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Python Tests
    -    ~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers import PythonLexer, Python3Lexer
    -from pygments.token import Token
    -
    -
    -class PythonTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = PythonLexer()
    -
    -    def test_cls_builtin(self):
    -        """
    -        Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
    -
    -        """
    -        fragment = 'class TestClass():\n    @classmethod\n    def hello(cls):\n        pass\n'
    -        tokens = [
    -            (Token.Keyword, 'class'),
    -            (Token.Text, ' '),
    -            (Token.Name.Class, 'TestClass'),
    -            (Token.Punctuation, '('),
    -            (Token.Punctuation, ')'),
    -            (Token.Punctuation, ':'),
    -            (Token.Text, '\n'),
    -            (Token.Text, '    '),
    -            (Token.Name.Decorator, '@classmethod'),
    -            (Token.Text, '\n'),
    -            (Token.Text, '    '),
    -            (Token.Keyword, 'def'),
    -            (Token.Text, ' '),
    -            (Token.Name.Function, 'hello'),
    -            (Token.Punctuation, '('),
    -            (Token.Name.Builtin.Pseudo, 'cls'),
    -            (Token.Punctuation, ')'),
    -            (Token.Punctuation, ':'),
    -            (Token.Text, '\n'),
    -            (Token.Text, '        '),
    -            (Token.Keyword, 'pass'),
    -            (Token.Text, '\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -
    -class Python3Test(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = Python3Lexer()
    -        
    -    def testNeedsName(self):
    -        """
    -        Tests that '@' is recognized as an Operator
    -        """
    -        fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
    -        tokens = [
    -            (Token.Name, u'S'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'='),
    -            (Token.Text, u' '),
    -            (Token.Punctuation, u'('),
    -            (Token.Name, u'H'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'@'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'beta'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'-'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'r'),
    -            (Token.Punctuation, u')'),
    -            (Token.Operator, u'.'),
    -            (Token.Name, u'T'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'@'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'inv'),
    -            (Token.Punctuation, u'('),
    -            (Token.Name, u'H'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'@'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'V'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'@'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'H'),
    -            (Token.Operator, u'.'),
    -            (Token.Name, u'T'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'@'),
    -            (Token.Text, u' '),
    -            (Token.Punctuation, u'('),
    -            (Token.Name, u'H'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'@'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'beta'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'-'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'r'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def test_pep_515(self):
    -        """
    -        Tests that the lexer can parse numeric literals with underscores
    -        """
    -        fragments = (
    -            (Token.Literal.Number.Integer, u'1_000_000'),
    -            (Token.Literal.Number.Float, u'1_000.000_001'),
    -            (Token.Literal.Number.Float, u'1_000e1_000j'),
    -            (Token.Literal.Number.Hex, u'0xCAFE_F00D'),
    -            (Token.Literal.Number.Bin, u'0b_0011_1111_0100_1110'),
    -            (Token.Literal.Number.Oct, u'0o_777_123'),
    -        )
    -
    -        for token, fragment in fragments:
    -            tokens = [
    -                (token, fragment),
    -                (Token.Text, u'\n'),
    -            ]
    -            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
    deleted file mode 100644
    index f40b8b63..00000000
    --- a/tests/test_qbasiclexer.py
    +++ /dev/null
    @@ -1,43 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Tests for QBasic
    -    ~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import glob
    -import os
    -import unittest
    -
    -from pygments.token import Token
    -from pygments.lexers.basic import QBasicLexer
    -
    -
    -class QBasicTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = QBasicLexer()
    -        self.maxDiff = None
    -
    -    def testKeywordsWithDollar(self):
    -        fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
    -        expected = [
    -            (Token.Keyword.Declaration, u'DIM'),
    -            (Token.Text.Whitespace, u' '),
    -            (Token.Name.Variable.Global, u'x'),
    -            (Token.Text, u'\n'),
    -            (Token.Name.Variable.Global, u'x'),
    -            (Token.Text.Whitespace, u' '),
    -            (Token.Operator, u'='),
    -            (Token.Text.Whitespace, u' '),
    -            (Token.Keyword.Reserved, u'RIGHT$'),
    -            (Token.Punctuation, u'('),
    -            (Token.Literal.String.Double, u'"abc"'),
    -            (Token.Punctuation, u','),
    -            (Token.Text.Whitespace, u' '),
    -            (Token.Literal.Number.Integer.Long, u'1'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_r.py b/tests/test_r.py
    deleted file mode 100644
    index 70148e53..00000000
    --- a/tests/test_r.py
    +++ /dev/null
    @@ -1,70 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    R Tests
    -    ~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers import SLexer
    -from pygments.token import Token, Name, Punctuation
    -
    -
    -class RTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = SLexer()
    -
    -    def testCall(self):
    -        fragment = u'f(1, a)\n'
    -        tokens = [
    -            (Name.Function, u'f'),
    -            (Punctuation, u'('),
    -            (Token.Literal.Number, u'1'),
    -            (Punctuation, u','),
    -            (Token.Text, u' '),
    -            (Token.Name, u'a'),
    -            (Punctuation, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testName1(self):
    -        fragment = u'._a_2.c'
    -        tokens = [
    -            (Name, u'._a_2.c'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testName2(self):
    -        # Invalid names are valid if backticks are used
    -        fragment = u'`.1 blah`'
    -        tokens = [
    -            (Name, u'`.1 blah`'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testName3(self):
    -        # Internal backticks can be escaped
    -        fragment = u'`.1 \\` blah`'
    -        tokens = [
    -            (Name, u'`.1 \\` blah`'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testCustomOperator(self):
    -        fragment = u'7 % and % 8'
    -        tokens = [
    -            (Token.Literal.Number, u'7'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'% and %'),
    -            (Token.Text, u' '),
    -            (Token.Literal.Number, u'8'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
    deleted file mode 100644
    index adc05a93..00000000
    --- a/tests/test_regexlexer.py
    +++ /dev/null
    @@ -1,66 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments regex lexer tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Text
    -from pygments.lexer import RegexLexer
    -from pygments.lexer import default
    -
    -
    -class TestLexer(RegexLexer):
    -    """Test tuple state transitions including #pop."""
    -    tokens = {
    -        'root': [
    -            ('a', Text.Root, 'rag'),
    -            ('e', Text.Root),
    -            ('#', Text.Root, '#pop'),
    -            ('@', Text.Root, ('#pop', '#pop')),
    -            default(('beer', 'beer'))
    -        ],
    -        'beer': [
    -            ('d', Text.Beer, ('#pop', '#pop')),
    -        ],
    -        'rag': [
    -            ('b', Text.Rag, '#push'),
    -            ('c', Text.Rag, ('#pop', 'beer')),
    -        ],
    -    }
    -
    -
    -class TupleTransTest(unittest.TestCase):
    -    def test(self):
    -        lx = TestLexer()
    -        toks = list(lx.get_tokens_unprocessed('abcde'))
    -        self.assertEqual(toks, [
    -            (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
    -            (3, Text.Beer, 'd'), (4, Text.Root, 'e')])
    -
    -    def test_multiline(self):
    -        lx = TestLexer()
    -        toks = list(lx.get_tokens_unprocessed('a\ne'))
    -        self.assertEqual(toks, [
    -            (0, Text.Root, 'a'), (1, Text, u'\n'), (2, Text.Root, 'e')])
    -
    -    def test_default(self):
    -        lx = TestLexer()
    -        toks = list(lx.get_tokens_unprocessed('d'))
    -        self.assertEqual(toks, [(0, Text.Beer, 'd')])
    -
    -
    -class PopEmptyTest(unittest.TestCase):
    -    def test_regular(self):
    -        lx = TestLexer()
    -        toks = list(lx.get_tokens_unprocessed('#e'))
    -        self.assertEqual(toks, [(0, Text.Root, '#'), (1, Text.Root, 'e')])
    -
    -    def test_tuple(self):
    -        lx = TestLexer()
    -        toks = list(lx.get_tokens_unprocessed('@e'))
    -        self.assertEqual(toks, [(0, Text.Root, '@'), (1, Text.Root, 'e')])
    diff --git a/tests/test_regexopt.py b/tests/test_regexopt.py
    deleted file mode 100644
    index 9c44f498..00000000
    --- a/tests/test_regexopt.py
    +++ /dev/null
    @@ -1,110 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Tests for pygments.regexopt
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import re
    -import random
    -import unittest
    -import itertools
    -
    -from pygments.regexopt import regex_opt
    -
    -ALPHABET = ['a', 'b', 'c', 'd', 'e']
    -
    -try:
    -    from itertools import combinations_with_replacement
    -    N_TRIES = 15
    -except ImportError:
    -    # Python 2.6
    -    def combinations_with_replacement(iterable, r):
    -        pool = tuple(iterable)
    -        n = len(pool)
    -        for indices in itertools.product(range(n), repeat=r):
    -            if sorted(indices) == list(indices):
    -                yield tuple(pool[i] for i in indices)
    -    N_TRIES = 9
    -
    -
    -class RegexOptTestCase(unittest.TestCase):
    -
    -    def generate_keywordlist(self, length):
    -        return [''.join(p) for p in
    -                combinations_with_replacement(ALPHABET, length)]
    -
    -    def test_randomly(self):
    -        # generate a list of all possible keywords of a certain length using
    -        # a restricted alphabet, then choose some to match and make sure only
    -        # those do
    -        for n in range(3, N_TRIES):
    -            kwlist = self.generate_keywordlist(n)
    -            to_match = random.sample(kwlist,
    -                                     random.randint(1, len(kwlist) - 1))
    -            no_match = set(kwlist) - set(to_match)
    -            rex = re.compile(regex_opt(to_match))
    -            self.assertEqual(rex.groups, 1)
    -            for w in to_match:
    -                self.assertTrue(rex.match(w))
    -            for w in no_match:
    -                self.assertFalse(rex.match(w))
    -
    -    def test_prefix(self):
    -        opt = regex_opt(('a', 'b'), prefix=r':{1,2}')
    -        print(opt)
    -        rex = re.compile(opt)
    -        self.assertFalse(rex.match('a'))
    -        self.assertTrue(rex.match('::a'))
    -        self.assertFalse(rex.match(':::')) # fullmatch
    -
    -    def test_suffix(self):
    -        opt = regex_opt(('a', 'b'), suffix=r':{1,2}')
    -        print(opt)
    -        rex = re.compile(opt)
    -        self.assertFalse(rex.match('a'))
    -        self.assertTrue(rex.match('a::'))
    -        self.assertFalse(rex.match(':::')) # fullmatch
    -
    -    def test_suffix_opt(self):
    -        # test that detected suffixes remain sorted.
    -        opt = regex_opt(('afoo', 'abfoo'))
    -        print(opt)
    -        rex = re.compile(opt)
    -        m = rex.match('abfoo')
    -        self.assertEqual(5, m.end())
    -
    -    def test_different_length_grouping(self):
    -        opt = regex_opt(('a', 'xyz'))
    -        print(opt)
    -        rex = re.compile(opt)
    -        self.assertTrue(rex.match('a'))
    -        self.assertTrue(rex.match('xyz'))
    -        self.assertFalse(rex.match('b'))
    -        self.assertEqual(1, rex.groups)
    -
    -    def test_same_length_grouping(self):
    -        opt = regex_opt(('a', 'b'))
    -        print(opt)
    -        rex = re.compile(opt)
    -        self.assertTrue(rex.match('a'))
    -        self.assertTrue(rex.match('b'))
    -        self.assertFalse(rex.match('x'))
    -
    -        self.assertEqual(1, rex.groups)
    -        groups = rex.match('a').groups()
    -        self.assertEqual(('a',), groups)
    -
    -    def test_same_length_suffix_grouping(self):
    -        opt = regex_opt(('a', 'b'), suffix='(m)')
    -        print(opt)
    -        rex = re.compile(opt)
    -        self.assertTrue(rex.match('am'))
    -        self.assertTrue(rex.match('bm'))
    -        self.assertFalse(rex.match('xm'))
    -        self.assertFalse(rex.match('ax'))
    -        self.assertEqual(2, rex.groups)
    -        groups = rex.match('am').groups()
    -        self.assertEqual(('a', 'm'), groups)
    diff --git a/tests/test_rtf_formatter.py b/tests/test_rtf_formatter.py
    deleted file mode 100644
    index 80ce01f5..00000000
    --- a/tests/test_rtf_formatter.py
    +++ /dev/null
    @@ -1,109 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments RTF formatter tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -from string_asserts import StringTests
    -
    -from pygments.util import StringIO
    -from pygments.formatters import RtfFormatter
    -from pygments.lexers.special import TextLexer
    -
    -class RtfFormatterTest(StringTests, unittest.TestCase):
    -    foot = (r'\par' '\n' r'}')
    -
    -    def _escape(self, string):
    -        return(string.replace("\n", r"\n"))
    -
    -    def _build_message(self, *args, **kwargs):
    -        string = kwargs.get('string', None)
    -        t = self._escape(kwargs.get('t', ''))
    -        expected = self._escape(kwargs.get('expected', ''))
    -        result = self._escape(kwargs.get('result', ''))
    -
    -        if string is None:
    -            string = (u"The expected output of '{t}'\n"
    -                      u"\t\tShould be '{expected}'\n"
    -                      u"\t\tActually outputs '{result}'\n"
    -                      u"\t(WARNING: Partial Output of Result!)")
    -
    -        end = -(len(self._escape(self.foot)))
    -        start = end-len(expected)
    -
    -        return string.format(t=t,
    -                             result = result[start:end],
    -                             expected = expected)
    -
    -    def format_rtf(self, t):
    -        tokensource = list(TextLexer().get_tokens(t))
    -        fmt = RtfFormatter()
    -        buf = StringIO()
    -        fmt.format(tokensource, buf)
    -        result = buf.getvalue()
    -        buf.close()
    -        return result
    -
    -    def test_rtf_header(self):
    -        t = u''
    -        result = self.format_rtf(t)
    -        expected = r'{\rtf1\ansi\uc0'
    -        msg = (u"RTF documents are expected to start with '{expected}'\n"
    -               u"\t\tStarts instead with '{result}'\n"
    -               u"\t(WARNING: Partial Output of Result!)".format(
    -                   expected = expected,
    -                   result = result[:len(expected)]))
    -        self.assertStartsWith(result, expected, msg)
    -
    -    def test_rtf_footer(self):
    -        t = u''
    -        result = self.format_rtf(t)
    -        expected = self.foot
    -        msg = (u"RTF documents are expected to end with '{expected}'\n"
    -               u"\t\tEnds instead with '{result}'\n"
    -               u"\t(WARNING: Partial Output of Result!)".format(
    -                   expected = self._escape(expected),
    -                   result = self._escape(result[-len(expected):])))
    -        self.assertEndsWith(result, expected, msg)
    -
    -    def test_ascii_characters(self):
    -        t = u'a b c d ~'
    -        result = self.format_rtf(t)
    -        expected = (r'a b c d ~')
    -        if not result.endswith(self.foot):
    -            self.skipTest('RTF Footer incorrect')
    -        msg = self._build_message(t=t, result=result, expected=expected)
    -        self.assertEndsWith(result, expected+self.foot, msg)
    -
    -    def test_escape_characters(self):
    -        t = u'\\ {{'
    -        result = self.format_rtf(t)
    -        expected = (r'\\ \{\{')
    -        if not result.endswith(self.foot):
    -            self.skipTest('RTF Footer incorrect')
    -        msg = self._build_message(t=t, result=result, expected=expected)
    -        self.assertEndsWith(result, expected+self.foot, msg)
    -
    -    def test_single_characters(self):
    -        t = u'â € ¤ каждой'
    -        result = self.format_rtf(t)
    -        expected = (r'{\u226} {\u8364} {\u164} '
    -                    r'{\u1082}{\u1072}{\u1078}{\u1076}{\u1086}{\u1081}')
    -        if not result.endswith(self.foot):
    -            self.skipTest('RTF Footer incorrect')
    -        msg = self._build_message(t=t, result=result, expected=expected)
    -        self.assertEndsWith(result, expected+self.foot, msg)
    -
    -    def test_double_characters(self):
    -        t = u'က 힣 ↕ ↕︎ 鼖'
    -        result = self.format_rtf(t)
    -        expected = (r'{\u4096} {\u55203} {\u8597} '
    -                    r'{\u8597}{\u65038} {\u55422}{\u56859}')
    -        if not result.endswith(self.foot):
    -            self.skipTest('RTF Footer incorrect')
    -        msg = self._build_message(t=t, result=result, expected=expected)
    -        self.assertEndsWith(result, expected+self.foot, msg)
    diff --git a/tests/test_ruby.py b/tests/test_ruby.py
    deleted file mode 100644
    index 45a77469..00000000
    --- a/tests/test_ruby.py
    +++ /dev/null
    @@ -1,145 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Basic RubyLexer Test
    -    ~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Operator, Number, Text, Token
    -from pygments.lexers import RubyLexer
    -
    -
    -class RubyTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = RubyLexer()
    -        self.maxDiff = None
    -
    -    def testRangeSyntax1(self):
    -        fragment = u'1..3\n'
    -        tokens = [
    -            (Number.Integer, u'1'),
    -            (Operator, u'..'),
    -            (Number.Integer, u'3'),
    -            (Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testRangeSyntax2(self):
    -        fragment = u'1...3\n'
    -        tokens = [
    -            (Number.Integer, u'1'),
    -            (Operator, u'...'),
    -            (Number.Integer, u'3'),
    -            (Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testRangeSyntax3(self):
    -        fragment = u'1 .. 3\n'
    -        tokens = [
    -            (Number.Integer, u'1'),
    -            (Text, u' '),
    -            (Operator, u'..'),
    -            (Text, u' '),
    -            (Number.Integer, u'3'),
    -            (Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testInterpolationNestedCurly(self):
    -        fragment = (
    -            u'"A#{ (3..5).group_by { |x| x/2}.map '
    -            u'do |k,v| "#{k}" end.join }" + "Z"\n')
    -
    -        tokens = [
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Literal.String.Double, u'A'),
    -            (Token.Literal.String.Interpol, u'#{'),
    -            (Token.Text, u' '),
    -            (Token.Punctuation, u'('),
    -            (Token.Literal.Number.Integer, u'3'),
    -            (Token.Operator, u'..'),
    -            (Token.Literal.Number.Integer, u'5'),
    -            (Token.Punctuation, u')'),
    -            (Token.Operator, u'.'),
    -            (Token.Name, u'group_by'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Interpol, u'{'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'|'),
    -            (Token.Name, u'x'),
    -            (Token.Operator, u'|'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'x'),
    -            (Token.Operator, u'/'),
    -            (Token.Literal.Number.Integer, u'2'),
    -            (Token.Literal.String.Interpol, u'}'),
    -            (Token.Operator, u'.'),
    -            (Token.Name, u'map'),
    -            (Token.Text, u' '),
    -            (Token.Keyword, u'do'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'|'),
    -            (Token.Name, u'k'),
    -            (Token.Punctuation, u','),
    -            (Token.Name, u'v'),
    -            (Token.Operator, u'|'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Literal.String.Interpol, u'#{'),
    -            (Token.Name, u'k'),
    -            (Token.Literal.String.Interpol, u'}'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u' '),
    -            (Token.Keyword, u'end'),
    -            (Token.Operator, u'.'),
    -            (Token.Name, u'join'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Interpol, u'}'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'+'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Literal.String.Double, u'Z'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testOperatorMethods(self):
    -        fragment = u'x.==4\n'
    -        tokens = [
    -            (Token.Name, u'x'),
    -            (Token.Operator, u'.'),
    -            (Token.Name.Operator, u'=='),
    -            (Token.Literal.Number.Integer, u'4'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testEscapedBracestring(self):
    -        fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
    -        tokens = [
    -            (Token.Name, u'str'),
    -            (Token.Operator, u'.'),
    -            (Token.Name, u'gsub'),
    -            (Token.Punctuation, u'('),
    -            (Token.Literal.String.Regex, u'%r{'),
    -            (Token.Literal.String.Regex, u'\\\\'),
    -            (Token.Literal.String.Regex, u'\\\\'),
    -            (Token.Literal.String.Regex, u'}'),
    -            (Token.Punctuation, u','),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Literal.String.Double, u'/'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Punctuation, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_shell.py b/tests/test_shell.py
    deleted file mode 100644
    index 6b24eb43..00000000
    --- a/tests/test_shell.py
    +++ /dev/null
    @@ -1,159 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Basic Shell Tests
    -    ~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Token
    -from pygments.lexers import BashLexer, BashSessionLexer, MSDOSSessionLexer
    -
    -
    -class BashTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = BashLexer()
    -        self.maxDiff = None
    -
    -    def testCurlyNoEscapeAndQuotes(self):
    -        fragment = u'echo "${a//["b"]/}"\n'
    -        tokens = [
    -            (Token.Name.Builtin, u'echo'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.String.Interpol, u'${'),
    -            (Token.Name.Variable, u'a'),
    -            (Token.Punctuation, u'//['),
    -            (Token.Literal.String.Double, u'"b"'),
    -            (Token.Punctuation, u']/'),
    -            (Token.String.Interpol, u'}'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testCurlyWithEscape(self):
    -        fragment = u'echo ${a//[\\"]/}\n'
    -        tokens = [
    -            (Token.Name.Builtin, u'echo'),
    -            (Token.Text, u' '),
    -            (Token.String.Interpol, u'${'),
    -            (Token.Name.Variable, u'a'),
    -            (Token.Punctuation, u'//['),
    -            (Token.Literal.String.Escape, u'\\"'),
    -            (Token.Punctuation, u']/'),
    -            (Token.String.Interpol, u'}'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testParsedSingle(self):
    -        fragment = u"a=$'abc\\''\n"
    -        tokens = [
    -            (Token.Name.Variable, u'a'),
    -            (Token.Operator, u'='),
    -            (Token.Literal.String.Single, u"$'abc\\''"),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testShortVariableNames(self):
    -        fragment = u'x="$"\ny="$_"\nz="$abc"\n'
    -        tokens = [
    -            # single lone $
    -            (Token.Name.Variable, u'x'),
    -            (Token.Operator, u'='),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u'$'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u'\n'),
    -            # single letter shell var
    -            (Token.Name.Variable, u'y'),
    -            (Token.Operator, u'='),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Name.Variable, u'$_'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u'\n'),
    -            # multi-letter user var
    -            (Token.Name.Variable, u'z'),
    -            (Token.Operator, u'='),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Name.Variable, u'$abc'),
    -            (Token.Literal.String.Double, u'"'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testArrayNums(self):
    -        fragment = u'a=(1 2 3)\n'
    -        tokens = [
    -            (Token.Name.Variable, u'a'),
    -            (Token.Operator, u'='),
    -            (Token.Operator, u'('),
    -            (Token.Literal.Number, u'1'),
    -            (Token.Text, u' '),
    -            (Token.Literal.Number, u'2'),
    -            (Token.Text, u' '),
    -            (Token.Literal.Number, u'3'),
    -            (Token.Operator, u')'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -    def testEndOfLineNums(self):
    -        fragment = u'a=1\nb=2 # comment\n'
    -        tokens = [
    -            (Token.Name.Variable, u'a'),
    -            (Token.Operator, u'='),
    -            (Token.Literal.Number, u'1'),
    -            (Token.Text, u'\n'),
    -            (Token.Name.Variable, u'b'),
    -            (Token.Operator, u'='),
    -            (Token.Literal.Number, u'2'),
    -            (Token.Text, u' '),
    -            (Token.Comment.Single, u'# comment\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -class BashSessionTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = BashSessionLexer()
    -        self.maxDiff = None
    -
    -    def testNeedsName(self):
    -        fragment = u'$ echo \\\nhi\nhi\n'
    -        tokens = [
    -            (Token.Text, u''),
    -            (Token.Generic.Prompt, u'$'),
    -            (Token.Text, u' '),
    -            (Token.Name.Builtin, u'echo'),
    -            (Token.Text, u' '),
    -            (Token.Literal.String.Escape, u'\\\n'),
    -            (Token.Text, u'hi'),
    -            (Token.Text, u'\n'),
    -            (Token.Generic.Output, u'hi\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    -class MSDOSSessionTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = MSDOSSessionLexer()
    -
    -    def testGtOnlyPrompt(self):
    -        fragment = u'> py\nhi\n'
    -        tokens = [
    -            (Token.Text, u''),
    -            (Token.Generic.Prompt, u'>'),
    -            (Token.Text, u' '),
    -            (Token.Text, u'py'),
    -            (Token.Text, u''),
    -            (Token.Text, u'\n'),
    -            (Token.Generic.Output, u'hi\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tests/test_smarty.py b/tests/test_smarty.py
    deleted file mode 100644
    index fb15f7f4..00000000
    --- a/tests/test_smarty.py
    +++ /dev/null
    @@ -1,40 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Basic SmartyLexer Test
    -    ~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Operator, Number, Text, Token
    -from pygments.lexers import SmartyLexer
    -
    -
    -class SmartyTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = SmartyLexer()
    -
    -    def testNestedCurly(self):
    -        fragment = u'{templateFunction param={anotherFunction} param2=$something}\n'
    -        tokens = [
    -            (Token.Comment.Preproc, u'{'),
    -            (Token.Name.Function, u'templateFunction'),
    -            (Token.Text, u' '),
    -            (Token.Name.Attribute, u'param'),
    -            (Token.Operator, u'='),
    -            (Token.Comment.Preproc, u'{'),
    -            (Token.Name.Attribute, u'anotherFunction'),
    -            (Token.Comment.Preproc, u'}'),
    -            (Token.Text, u' '),
    -            (Token.Name.Attribute, u'param2'),
    -            (Token.Operator, u'='),
    -            (Token.Name.Variable, u'$something'),
    -            (Token.Comment.Preproc, u'}'),
    -            (Token.Other, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    -
    diff --git a/tests/test_sql.py b/tests/test_sql.py
    deleted file mode 100644
    index 6be34006..00000000
    --- a/tests/test_sql.py
    +++ /dev/null
    @@ -1,118 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments SQL lexers tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -import unittest
    -
    -from pygments.lexers.sql import name_between_bracket_re, \
    -    name_between_backtick_re, tsql_go_re, tsql_declare_re, \
    -    tsql_variable_re, MySqlLexer, SqlLexer, TransactSqlLexer
    -
    -from pygments.token import Comment, Name, Number, Punctuation, Whitespace
    -
    -
    -class TransactSqlLexerTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = TransactSqlLexer()
    -
    -    def _assertAreTokensOfType(self, examples, expected_token_type):
    -        for test_number, example in enumerate(examples.split(), 1):
    -            token_count = 0
    -            for token_type, token_value in self.lexer.get_tokens(example):
    -                if token_type != Whitespace:
    -                    token_count += 1
    -                    self.assertEqual(
    -                        token_type, expected_token_type,
    -                        'token_type #%d for %s is %s but must be %s' %
    -                        (test_number, token_value, token_type, expected_token_type))
    -            self.assertEqual(
    -                token_count, 1,
    -                '%s must yield exactly 1 token instead of %d' %
    -                (example, token_count))
    -
    -    def _assertTokensMatch(self, text, expected_tokens_without_trailing_newline):
    -        actual_tokens = tuple(self.lexer.get_tokens(text))
    -        if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
    -            actual_tokens = tuple(actual_tokens[:-1])
    -        self.assertEqual(
    -            expected_tokens_without_trailing_newline, actual_tokens,
    -            'text must yield expected tokens: %s' % text)
    -
    -    def test_can_lex_float(self):
    -        self._assertAreTokensOfType(
    -            '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float)
    -        self._assertTokensMatch(
    -            '1e2.1e2',
    -            ((Number.Float, '1e2'), (Number.Float, '.1e2'))
    -        )
    -
    -    def test_can_reject_almost_float(self):
    -        self._assertTokensMatch(
    -            '.e1',
    -            ((Punctuation, '.'), (Name, 'e1')))
    -
    -    def test_can_lex_integer(self):
    -        self._assertAreTokensOfType(
    -            '1 23 456', Number.Integer)
    -
    -    def test_can_lex_names(self):
    -        self._assertAreTokensOfType(
    -            u'thingy thingy123 _thingy _ _123 Ähnliches Müll #temp1 ##temp2', Name)
    -
    -    def test_can_lex_comments(self):
    -        self._assertTokensMatch('--\n', ((Comment.Single, '--\n'),))
    -        self._assertTokensMatch('/**/', (
    -            (Comment.Multiline, '/*'), (Comment.Multiline, '*/')
    -        ))
    -        self._assertTokensMatch('/*/**/*/', (
    -            (Comment.Multiline, '/*'),
    -            (Comment.Multiline, '/*'),
    -            (Comment.Multiline, '*/'),
    -            (Comment.Multiline, '*/'),
    -        ))
    -
    -
    -class SqlAnalyzeTextTest(unittest.TestCase):
    -    def test_can_match_analyze_text_res(self):
    -        self.assertEqual(['`a`', '`bc`'],
    -            name_between_backtick_re.findall('select `a`, `bc` from some'))
    -        self.assertEqual(['[a]', '[bc]'],
    -            name_between_bracket_re.findall('select [a], [bc] from some'))
    -        self.assertTrue(tsql_declare_re.search('--\nDeClaRe @some int;'))
    -        self.assertTrue(tsql_go_re.search('select 1\ngo\n--'))
    -        self.assertTrue(tsql_variable_re.search(
    -            'create procedure dbo.usp_x @a int, @b int'))
    -
    -    def test_can_analyze_text(self):
    -        mysql_lexer = MySqlLexer()
    -        sql_lexer = SqlLexer()
    -        tsql_lexer = TransactSqlLexer()
    -        code_to_expected_lexer_map = {
    -            'select `a`, `bc` from some': mysql_lexer,
    -            'select a, bc from some': sql_lexer,
    -            'select [a], [bc] from some': tsql_lexer,
    -            '-- `a`, `bc`\nselect [a], [bc] from some': tsql_lexer,
    -            '-- `a`, `bc`\nselect [a], [bc] from some; go': tsql_lexer,
    -        }
    -        sql_lexers = set(code_to_expected_lexer_map.values())
    -        for code, expected_lexer in code_to_expected_lexer_map.items():
    -            ratings_and_lexers = list((lexer.analyse_text(code), lexer.name) for lexer in sql_lexers)
    -            best_rating, best_lexer_name  = sorted(ratings_and_lexers, reverse=True)[0]
    -            expected_rating = expected_lexer.analyse_text(code)
    -            message = (
    -                'lexer must be %s (rating %.2f) instead of '
    -                '%s (rating %.2f) for analyse_text() on code:\n%s') % (
    -                expected_lexer.name,
    -                expected_rating,
    -                best_lexer_name,
    -                best_rating,
    -                code
    -            )
    -            self.assertEqual(
    -                expected_lexer.name, best_lexer_name, message
    -            )
    diff --git a/tests/test_string_asserts.py b/tests/test_string_asserts.py
    deleted file mode 100644
    index 737ba200..00000000
    --- a/tests/test_string_asserts.py
    +++ /dev/null
    @@ -1,35 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments string assert utility tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -from string_asserts import StringTests
    -
    -class TestStringTests(StringTests, unittest.TestCase):
    -
    -    def test_startswith_correct(self):
    -        self.assertStartsWith("AAA", "A")
    -
    -    # @unittest.expectedFailure not supported by nose
    -    def test_startswith_incorrect(self):
    -        self.assertRaises(AssertionError, self.assertStartsWith, "AAA", "B")
    -
    -    # @unittest.expectedFailure not supported by nose
    -    def test_startswith_short(self):
    -        self.assertRaises(AssertionError, self.assertStartsWith, "A", "AA")
    -
    -    def test_endswith_correct(self):
    -        self.assertEndsWith("AAA", "A")
    -
    -    # @unittest.expectedFailure not supported by nose
    -    def test_endswith_incorrect(self):
    -        self.assertRaises(AssertionError, self.assertEndsWith, "AAA", "B")
    -
    -    # @unittest.expectedFailure not supported by nose
    -    def test_endswith_short(self):
    -        self.assertRaises(AssertionError, self.assertEndsWith, "A", "AA")
    diff --git a/tests/test_terminal_formatter.py b/tests/test_terminal_formatter.py
    deleted file mode 100644
    index e5a13431..00000000
    --- a/tests/test_terminal_formatter.py
    +++ /dev/null
    @@ -1,102 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments terminal formatter tests
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -from __future__ import print_function
    -
    -import unittest
    -import re
    -
    -from pygments.util import StringIO
    -from pygments.lexers.sql import PlPgsqlLexer
    -from pygments.formatters import TerminalFormatter, Terminal256Formatter, \
    -    HtmlFormatter, LatexFormatter
    -
    -from pygments.style import Style
    -from pygments.token import Token
    -from pygments.lexers import Python3Lexer
    -from pygments import highlight
    -
    -DEMO_TEXT = '''\
    --- comment
    -select
    -* from bar;
    -'''
    -DEMO_LEXER = PlPgsqlLexer
    -DEMO_TOKENS = list(DEMO_LEXER().get_tokens(DEMO_TEXT))
    -
    -ANSI_RE = re.compile(r'\x1b[\w\W]*?m')
    -
    -
    -def strip_ansi(x):
    -    return ANSI_RE.sub('', x)
    -
    -
    -class TerminalFormatterTest(unittest.TestCase):
    -    def test_reasonable_output(self):
    -        out = StringIO()
    -        TerminalFormatter().format(DEMO_TOKENS, out)
    -        plain = strip_ansi(out.getvalue())
    -        self.assertEqual(DEMO_TEXT.count('\n'), plain.count('\n'))
    -        print(repr(plain))
    -
    -        for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
    -            self.assertEqual(a, b)
    -
    -    def test_reasonable_output_lineno(self):
    -        out = StringIO()
    -        TerminalFormatter(linenos=True).format(DEMO_TOKENS, out)
    -        plain = strip_ansi(out.getvalue())
    -        self.assertEqual(DEMO_TEXT.count('\n') + 1, plain.count('\n'))
    -        print(repr(plain))
    -
    -        for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
    -            self.assertTrue(a in b)
    -
    -
    -class MyStyle(Style):
    -    styles = {
    -        Token.Comment:    'ansibrightblack',
    -        Token.String:     'ansibrightblue bg:ansired',
    -        Token.Number:     'ansibrightgreen bg:ansigreen',
    -        Token.Number.Hex: 'ansigreen bg:ansibrightred',
    -    }
    -
    -
    -class Terminal256FormatterTest(unittest.TestCase):
    -    code = '''
    -# this should be a comment
    -print("Hello World")
    -async def function(a,b,c, *d, **kwarg:Bool)->Bool:
    -    pass
    -    return 123, 0xb3e3
    -
    -'''
    -
    -    def test_style_html(self):
    -        style = HtmlFormatter(style=MyStyle).get_style_defs()
    -        self.assertTrue('#555555' in style,
    -                        "ansigray for comment not html css style")
    -
    -    def test_others_work(self):
    -        """check other formatters don't crash"""
    -        highlight(self.code, Python3Lexer(), LatexFormatter(style=MyStyle))
    -        highlight(self.code, Python3Lexer(), HtmlFormatter(style=MyStyle))
    -
    -    def test_256esc_seq(self):
    -        """
    -        test that a few escape sequences are actually used when using ansi<> color codes
    -        """
    -        def termtest(x):
    -            return highlight(x, Python3Lexer(),
    -                             Terminal256Formatter(style=MyStyle))
    -
    -        self.assertTrue('32;101' in termtest('0x123'))
    -        self.assertTrue('92;42' in termtest('123'))
    -        self.assertTrue('90' in termtest('#comment'))
    -        self.assertTrue('94;41' in termtest('"String"'))
    diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py
    deleted file mode 100644
    index 8a1b8eda..00000000
    --- a/tests/test_textfmts.py
    +++ /dev/null
    @@ -1,41 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Basic Tests for textfmts
    -    ~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.token import Operator, Number, Text, Token
    -from pygments.lexers.textfmts import HttpLexer
    -
    -
    -class RubyTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.lexer = HttpLexer()
    -        self.maxDiff = None
    -
    -    def testApplicationXml(self):
    -        fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
    -        tokens = [
    -            (Token.Name.Tag, u'<foo'),
    -            (Token.Name.Tag, u'>'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(
    -            tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
    -
    -    def testApplicationCalendarXml(self):
    -        fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
    -        tokens = [
    -            (Token.Name.Tag, u'<foo'),
    -            (Token.Name.Tag, u'>'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(
    -            tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
    -
    diff --git a/tests/test_token.py b/tests/test_token.py
    deleted file mode 100644
    index fdbcabd1..00000000
    --- a/tests/test_token.py
    +++ /dev/null
    @@ -1,54 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Test suite for the token module
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import copy
    -import unittest
    -
    -from pygments import token
    -
    -
    -class TokenTest(unittest.TestCase):
    -
    -    def test_tokentype(self):
    -        e = self.assertEqual
    -
    -        t = token.String
    -
    -        e(t.split(), [token.Token, token.Literal, token.String])
    -
    -        e(t.__class__, token._TokenType)
    -
    -    def test_functions(self):
    -        self.assertTrue(token.is_token_subtype(token.String, token.String))
    -        self.assertTrue(token.is_token_subtype(token.String, token.Literal))
    -        self.assertFalse(token.is_token_subtype(token.Literal, token.String))
    -
    -        self.assertTrue(token.string_to_tokentype(token.String) is token.String)
    -        self.assertTrue(token.string_to_tokentype('') is token.Token)
    -        self.assertTrue(token.string_to_tokentype('String') is token.String)
    -
    -    def test_sanity_check(self):
    -        stp = token.STANDARD_TYPES.copy()
    -        stp[token.Token] = '---' # Token and Text do conflict, that is okay
    -        t = {}
    -        for k, v in stp.items():
    -            t.setdefault(v, []).append(k)
    -        if len(t) == len(stp):
    -            return # Okay
    -
    -        for k, v in t.items():
    -            if len(v) > 1:
    -                self.fail("%r has more than one key: %r" % (k, v))
    -
    -    def test_copying(self):
    -        # Token instances are supposed to be singletons, so copying or even
    -        # deepcopying should return themselves
    -        t = token.String
    -        self.assertIs(t, copy.copy(t))
    -        self.assertIs(t, copy.deepcopy(t))
    diff --git a/tests/test_unistring.py b/tests/test_unistring.py
    deleted file mode 100644
    index 82d74ed6..00000000
    --- a/tests/test_unistring.py
    +++ /dev/null
    @@ -1,48 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Test suite for the unistring module
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import re
    -import unittest
    -import random
    -
    -from pygments import unistring as uni
    -from pygments.util import unichr
    -
    -
    -class UnistringTest(unittest.TestCase):
    -    def test_cats_exist_and_compilable(self):
    -        for cat in uni.cats:
    -            s = getattr(uni, cat)
    -            if s == '':  # Probably Cs on Jython
    -                continue
    -            print("%s %r" % (cat, s))
    -            re.compile('[%s]' % s)
    -
    -    def _cats_that_match(self, c):
    -        matching_cats = []
    -        for cat in uni.cats:
    -            s = getattr(uni, cat)
    -            if s == '':  # Probably Cs on Jython
    -                continue
    -            if re.compile('[%s]' % s).match(c):
    -                matching_cats.append(cat)
    -        return matching_cats
    -
    -    def test_spot_check_types(self):
    -        # Each char should match one, and precisely one, category
    -        random.seed(0)
    -        for i in range(1000):
    -            o = random.randint(0, 65535)
    -            c = unichr(o)
    -            if o > 0xd800 and o <= 0xdfff and not uni.Cs:
    -                continue  # Bah, Jython.
    -            print(hex(o))
    -            cats = self._cats_that_match(c)
    -            self.assertEqual(len(cats), 1,
    -                             "%d (%s): %s" % (o, c, cats))
    diff --git a/tests/test_using_api.py b/tests/test_using_api.py
    deleted file mode 100644
    index 2ab70d09..00000000
    --- a/tests/test_using_api.py
    +++ /dev/null
    @@ -1,40 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Pygments tests for using()
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexer import using, bygroups, this, RegexLexer
    -from pygments.token import String, Text, Keyword
    -
    -class TestLexer(RegexLexer):
    -    tokens = {
    -        'root': [
    -            (r'#.*',
    -             using(this, state='invalid')),
    -            (r'(")(.+?)(")',
    -             bygroups(String, using(this, state='string'), String)),
    -            (r'[^"]+', Text),
    -        ],
    -        'string': [
    -            (r'.+', Keyword),
    -        ],
    -    }
    -
    -
    -class UsingStateTest(unittest.TestCase):
    -    def test_basic(self):
    -        expected = [(Text, 'a'), (String, '"'), (Keyword, 'bcd'),
    -                    (String, '"'), (Text, 'e\n')]
    -        t = list(TestLexer().get_tokens('a"bcd"e'))
    -        self.assertEqual(t, expected)
    -
    -    def test_error(self):
    -        def gen():
    -            return list(TestLexer().get_tokens('#a'))
    -        self.assertRaises(KeyError, gen)
    diff --git a/tests/test_util.py b/tests/test_util.py
    deleted file mode 100644
    index 646a403b..00000000
    --- a/tests/test_util.py
    +++ /dev/null
    @@ -1,213 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Test suite for the util module
    -    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import re
    -import unittest
    -
    -from pygments import util, console
    -
    -
    -class FakeLexer(object):
    -    def analyse(text):
    -        return text
    -    analyse = util.make_analysator(analyse)
    -
    -
    -class UtilTest(unittest.TestCase):
    -
    -    def test_getoptions(self):
    -        raises = self.assertRaises
    -        equals = self.assertEqual
    -
    -        equals(util.get_bool_opt({}, 'a', True), True)
    -        equals(util.get_bool_opt({}, 'a', 1), True)
    -        equals(util.get_bool_opt({}, 'a', 'true'), True)
    -        equals(util.get_bool_opt({}, 'a', 'no'), False)
    -        raises(util.OptionError, util.get_bool_opt, {}, 'a', [])
    -        raises(util.OptionError, util.get_bool_opt, {}, 'a', 'foo')
    -
    -        equals(util.get_int_opt({}, 'a', 1), 1)
    -        raises(util.OptionError, util.get_int_opt, {}, 'a', [])
    -        raises(util.OptionError, util.get_int_opt, {}, 'a', 'bar')
    -
    -        equals(util.get_list_opt({}, 'a', [1]), [1])
    -        equals(util.get_list_opt({}, 'a', '1 2'), ['1', '2'])
    -        raises(util.OptionError, util.get_list_opt, {}, 'a', 1)
    -
    -        equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'bar'), 'bar')
    -        equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'Bar', True), 'bar')
    -        raises(util.OptionError, util.get_choice_opt, {}, 'a',
    -               ['foo', 'bar'], 'baz')
    -
    -    def test_docstring_headline(self):
    -        def f1():
    -            """
    -            docstring headline
    -
    -            other text
    -            """
    -        def f2():
    -            """
    -            docstring
    -            headline
    -
    -            other text
    -            """
    -        def f3():
    -            pass
    -
    -        self.assertEqual(util.docstring_headline(f1), 'docstring headline')
    -        self.assertEqual(util.docstring_headline(f2), 'docstring headline')
    -        self.assertEqual(util.docstring_headline(f3), '')
    -
    -    def test_analysator_returns_float(self):
    -        # If an analysator wrapped by make_analysator returns a floating point
    -        # number, then that number will be returned by the wrapper.
    -        self.assertEqual(FakeLexer.analyse('0.5'), 0.5)
    -
    -    def test_analysator_returns_boolean(self):
    -        # If an analysator wrapped by make_analysator returns a boolean value,
    -        # then the wrapper will return 1.0 if the boolean was True or 0.0 if
    -        # it was False.
    -        self.assertEqual(FakeLexer.analyse(True), 1.0)
    -        self.assertEqual(FakeLexer.analyse(False), 0.0)
    -
    -    def test_analysator_raises_exception(self):
    -        # If an analysator wrapped by make_analysator raises an exception,
    -        # then the wrapper will return 0.0.
    -        class ErrorLexer(object):
    -            def analyse(text):
    -                raise RuntimeError('something bad happened')
    -            analyse = util.make_analysator(analyse)
    -        self.assertEqual(ErrorLexer.analyse(''), 0.0)
    -
    -    def test_analysator_value_error(self):
    -        # When converting the analysator's return value to a float a
    -        # ValueError may occur.  If that happens 0.0 is returned instead.
    -        self.assertEqual(FakeLexer.analyse('bad input'), 0.0)
    -
    -    def test_analysator_type_error(self):
    -        # When converting the analysator's return value to a float a
    -        # TypeError may occur.  If that happens 0.0 is returned instead.
    -        self.assertEqual(FakeLexer.analyse('xxx'), 0.0)
    -
    -    def test_shebang_matches(self):
    -        self.assertTrue(util.shebang_matches('#!/usr/bin/env python\n', r'python(2\.\d)?'))
    -        self.assertTrue(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
    -        self.assertTrue(util.shebang_matches('#!/usr/bin/startsomethingwith python',
    -                                             r'python(2\.\d)?'))
    -        self.assertTrue(util.shebang_matches('#!C:\\Python2.4\\Python.exe',
    -                                             r'python(2\.\d)?'))
    -
    -        self.assertFalse(util.shebang_matches('#!/usr/bin/python-ruby',
    -                                              r'python(2\.\d)?'))
    -        self.assertFalse(util.shebang_matches('#!/usr/bin/python/ruby',
    -                                              r'python(2\.\d)?'))
    -        self.assertFalse(util.shebang_matches('#!', r'python'))
    -
    -    def test_doctype_matches(self):
    -        self.assertTrue(util.doctype_matches(
    -            '<!DOCTYPE html> <html>', 'html.*'))
    -        self.assertFalse(util.doctype_matches(
    -            '<?xml ?> <DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
    -        self.assertTrue(util.html_doctype_matches(
    -            '<?xml ?><!DOCTYPE html>'))
    -
    -    def test_xml(self):
    -        self.assertTrue(util.looks_like_xml(
    -            '<?xml ?><!DOCTYPE html>'))
    -        self.assertTrue(util.looks_like_xml('<html>abc</html>'))
    -        self.assertFalse(util.looks_like_xml('<html>'))
    -
    -    def test_unirange(self):
    -        first_non_bmp = u'\U00010000'
    -        r = re.compile(util.unirange(0x10000, 0x20000))
    -        m = r.match(first_non_bmp)
    -        self.assertTrue(m)
    -        self.assertEqual(m.end(), len(first_non_bmp))
    -        self.assertFalse(r.match(u'\uffff'))
    -        self.assertFalse(r.match(u'xxx'))
    -        # Tests that end is inclusive
    -        r = re.compile(util.unirange(0x10000, 0x10000) + '+')
    -        # Tests that the plus works for the entire unicode point, if narrow
    -        # build
    -        m = r.match(first_non_bmp * 2)
    -        self.assertTrue(m)
    -        self.assertEqual(m.end(), len(first_non_bmp) * 2)
    -
    -    def test_format_lines(self):
    -        lst = ['cat', 'dog']
    -        output = util.format_lines('var', lst)
    -        d = {}
    -        exec(output, d)
    -        self.assertTrue(isinstance(d['var'], tuple))
    -        self.assertEqual(('cat', 'dog'), d['var'])
    -
    -    def test_duplicates_removed_seq_types(self):
    -        # tuple
    -        x = util.duplicates_removed(('a', 'a', 'b'))
    -        self.assertEqual(['a', 'b'], x)
    -        # list
    -        x = util.duplicates_removed(['a', 'a', 'b'])
    -        self.assertEqual(['a', 'b'], x)
    -        # iterator
    -        x = util.duplicates_removed(iter(('a', 'a', 'b')))
    -        self.assertEqual(['a', 'b'], x)
    -
    -    def test_duplicates_removed_nonconsecutive(self):
    -        # keeps first
    -        x = util.duplicates_removed(('a', 'b', 'a'))
    -        self.assertEqual(['a', 'b'], x)
    -
    -    def test_guess_decode(self):
    -        # UTF-8 should be decoded as UTF-8
    -        s = util.guess_decode(u'\xff'.encode('utf-8'))
    -        self.assertEqual(s, (u'\xff', 'utf-8'))
    -
    -        # otherwise, it could be latin1 or the locale encoding...
    -        import locale
    -        s = util.guess_decode(b'\xff')
    -        self.assertTrue(s[1] in ('latin1', locale.getpreferredencoding()))
    -
    -    def test_guess_decode_from_terminal(self):
    -        class Term:
    -            encoding = 'utf-7'
    -
    -        s = util.guess_decode_from_terminal(u'\xff'.encode('utf-7'), Term)
    -        self.assertEqual(s, (u'\xff', 'utf-7'))
    -
    -        s = util.guess_decode_from_terminal(u'\xff'.encode('utf-8'), Term)
    -        self.assertEqual(s, (u'\xff', 'utf-8'))
    -
    -    def test_add_metaclass(self):
    -        class Meta(type):
    -            pass
    -
    -        @util.add_metaclass(Meta)
    -        class Cls:
    -            pass
    -
    -        self.assertEqual(type(Cls), Meta)
    -
    -
    -class ConsoleTest(unittest.TestCase):
    -
    -    def test_ansiformat(self):
    -        f = console.ansiformat
    -        c = console.codes
    -        all_attrs = f('+*_blue_*+', 'text')
    -        self.assertTrue(c['blue'] in all_attrs and c['blink'] in all_attrs
    -                        and c['bold'] in all_attrs and c['underline'] in all_attrs
    -                        and c['reset'] in all_attrs)
    -        self.assertRaises(KeyError, f, '*mauve*', 'text')
    -
    -    def test_functions(self):
    -        self.assertEqual(console.reset_color(), console.codes['reset'])
    -        self.assertEqual(console.colorize('blue', 'text'),
    -                         console.codes['blue'] + 'text' + console.codes['reset'])
    diff --git a/tests/test_whiley.py b/tests/test_whiley.py
    deleted file mode 100644
    index f447ffec..00000000
    --- a/tests/test_whiley.py
    +++ /dev/null
    @@ -1,30 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""
    -    Whiley Test
    -    ~~~~~~~~~~~
    -
    -    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
    -    :license: BSD, see LICENSE for details.
    -"""
    -
    -import unittest
    -
    -from pygments.lexers import WhileyLexer
    -from pygments.token import Token
    -
    -
    -class WhileyTest(unittest.TestCase):
    -    def setUp(self):
    -        self.lexer = WhileyLexer()
    -
    -    def testWhileyOperator(self):
    -        fragment = u'123 \u2200 x\n'
    -        tokens = [
    -            (Token.Literal.Number.Integer, u'123'),
    -            (Token.Text, u' '),
    -            (Token.Operator, u'\u2200'),
    -            (Token.Text, u' '),
    -            (Token.Name, u'x'),
    -            (Token.Text, u'\n'),
    -        ]
    -        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
    diff --git a/tox.ini b/tox.ini
    deleted file mode 100644
    index 2c63c292..00000000
    --- a/tox.ini
    +++ /dev/null
    @@ -1,7 +0,0 @@
    -[tox]
    -envlist = py27, py35, py36, py37
    -[testenv]
    -deps =
    -    nose
    -    coverage
    -commands = python -d tests/run.py {posargs}