From 57ea7dad48fbf2541c04e434c31bde655ada3ac4 Mon Sep 17 00:00:00 2001
From: Taco Hoekwater
Date: Mon, 24 May 2010 14:05:02 +0000
Subject: here is context 2010.05.24 13:05

git-svn-id: svn://tug.org/texlive/trunk@18445 c570f23f-e606-0410-a88d-b1316a301751
---
 .../context/data/scite/cont-cs-scite.properties | 98 +-
 .../context/data/scite/cont-de-scite.properties | 98 +-
 .../context/data/scite/cont-en-scite.properties | 98 +-
 .../context/data/scite/cont-fr-scite.properties | 98 +-
 .../context/data/scite/cont-it-scite.properties | 98 +-
 .../context/data/scite/cont-nl-scite.properties | 98 +-
 .../context/data/scite/cont-pe-scite.properties | 98 +-
 .../context/data/scite/cont-ro-scite.properties | 98 +-
 .../context/data/scite/context.properties | 27 +-
 Master/texmf-dist/context/data/scite/scite-ctx.lua | 838 +
 .../context/data/scite/scite-ctx.properties | 3 +-
 .../texmf-dist/context/data/scite/scite-ctx.readme | 27 +-
 .../texmf-dist/context/data/textadept/context.lua | 710 +
 .../context/data/texworks/TUG/TeXworks.ini | 18 +
 .../context/data/texworks/TUG/texworks.ini | 45 -
 .../context/data/texworks/configuration/tools.ini | 35 +
 .../texmf-dist/fonts/enc/dvips/context/ec-2004.enc | 82 +
 .../fonts/map/pdftex/context/mkiv-base.map | 252 +
 .../fonts/map/pdftex/context/mkiv-px.map | 51 +
 .../fonts/map/pdftex/context/mkiv-tx.map | 94 +
 .../map/pdftex/context/original-public-lm.map | 78 +-
 Master/texmf-dist/metapost/base/mfplain.mp | 2 +-
 Master/texmf-dist/metapost/context/base/mp-back.mp | 232 +-
 Master/texmf-dist/metapost/context/base/mp-chem.mp | 104 +-
 Master/texmf-dist/metapost/context/base/mp-page.mp | 27 +
 Master/texmf-dist/metapost/context/base/mp-text.mp | 3 +
 Master/texmf-dist/metapost/context/base/mp-tool.mp | 2 +
 .../texmf-dist/metapost/context/font/punkfont.mp | 15 +-
 Master/texmf-dist/scripts/context/lua/luatools.lua | 2232 +-
 Master/texmf-dist/scripts/context/lua/luatools.rme | 2 +-
 .../texmf-dist/scripts/context/lua/mtx-babel.lua | 2 +-
 .../texmf-dist/scripts/context/lua/mtx-cache.lua | 19 +-
 .../texmf-dist/scripts/context/lua/mtx-chars.lua | 26 +-
 .../texmf-dist/scripts/context/lua/mtx-check.lua | 12 +-
 .../texmf-dist/scripts/context/lua/mtx-context.lua | 355 +-
 .../texmf-dist/scripts/context/lua/mtx-convert.lua | 10 +-
 .../texmf-dist/scripts/context/lua/mtx-fonts.lua | 312 +-
 Master/texmf-dist/scripts/context/lua/mtx-grep.lua | 4 +-
 .../scripts/context/lua/mtx-interface.lua | 30 +-
 .../texmf-dist/scripts/context/lua/mtx-metatex.lua | 2 +-
 .../texmf-dist/scripts/context/lua/mtx-modules.lua | 167 +
 .../texmf-dist/scripts/context/lua/mtx-mptopdf.lua | 99 +-
 .../scripts/context/lua/mtx-mtxworks.lua | 14 +
 .../texmf-dist/scripts/context/lua/mtx-package.lua | 2 +-
 .../scripts/context/lua/mtx-patterns.lua | 46 +-
 .../texmf-dist/scripts/context/lua/mtx-profile.lua | 16 +-
 .../texmf-dist/scripts/context/lua/mtx-scite.lua | 166 +
 .../context/lua/mtx-server-ctx-fonttest.lua | 302 +-
 .../scripts/context/lua/mtx-server-ctx-help.lua | 207 +-
 .../scripts/context/lua/mtx-server-ctx-startup.lua | 32 +-
 .../texmf-dist/scripts/context/lua/mtx-server.lua | 11 +-
 .../scripts/context/lua/mtx-texworks.lua | 33 +-
 .../texmf-dist/scripts/context/lua/mtx-timing.lua | 80 +-
 .../texmf-dist/scripts/context/lua/mtx-tools.lua | 129 +-
 .../texmf-dist/scripts/context/lua/mtx-update.lua | 175 +-
 .../texmf-dist/scripts/context/lua/mtx-watch.lua | 401 +-
 Master/texmf-dist/scripts/context/lua/mtxrun.lua | 7529 +-
 Master/texmf-dist/scripts/context/lua/mtxrun.rme | 21 +-
 .../texmf-dist/scripts/context/lua/scite-ctx.lua | 
843 - Master/texmf-dist/scripts/context/lua/x-ldx.lua | 96 +- Master/texmf-dist/scripts/context/perl/mptopdf.pl | 8 +- Master/texmf-dist/scripts/context/perl/texshow.pl | 956 - Master/texmf-dist/scripts/context/ruby/base/ctx.rb | 5 + Master/texmf-dist/scripts/context/ruby/base/tex.rb | 17 +- .../texmf-dist/scripts/context/ruby/newimgtopdf.rb | 8 - .../texmf-dist/scripts/context/ruby/newpstopdf.rb | 8 - .../texmf-dist/scripts/context/ruby/newtexexec.rb | 8 - .../texmf-dist/scripts/context/ruby/newtexutil.rb | 8 - .../scripts/context/stubs/mswin/context.cmd | 5 - .../scripts/context/stubs/mswin/context.exe | Bin 0 -> 6144 bytes .../scripts/context/stubs/mswin/ctxtools.bat | 5 - .../scripts/context/stubs/mswin/luatools.cmd | 5 - .../scripts/context/stubs/mswin/luatools.exe | Bin 0 -> 6144 bytes .../scripts/context/stubs/mswin/luatools.lua | 2232 +- .../scripts/context/stubs/mswin/makempy.bat | 5 - .../scripts/context/stubs/mswin/metatex.cmd | 5 - .../scripts/context/stubs/mswin/metatex.exe | Bin 0 -> 6144 bytes .../scripts/context/stubs/mswin/mpstools.bat | 5 - .../scripts/context/stubs/mswin/mptopdf.bat | 5 - .../scripts/context/stubs/mswin/mtxrun.cmd | 5 - .../scripts/context/stubs/mswin/mtxrun.dll | Bin 0 -> 9216 bytes .../scripts/context/stubs/mswin/mtxrun.exe | Bin 0 -> 6144 bytes .../scripts/context/stubs/mswin/mtxrun.lua | 7529 +- .../scripts/context/stubs/mswin/mtxtools.bat | 5 - .../scripts/context/stubs/mswin/mtxworks.cmd | 1 - .../scripts/context/stubs/mswin/pdftools.bat | 5 - .../scripts/context/stubs/mswin/pstopdf.bat | 5 - .../scripts/context/stubs/mswin/rlxtools.bat | 5 - .../scripts/context/stubs/mswin/runtools.bat | 5 - .../scripts/context/stubs/mswin/texexec.bat | 5 - .../scripts/context/stubs/mswin/texexec.cmd | 5 - .../scripts/context/stubs/mswin/texexec.exe | Bin 0 -> 6144 bytes .../scripts/context/stubs/mswin/texfont.bat | 5 - .../scripts/context/stubs/mswin/texmfstart.cmd | 5 - .../scripts/context/stubs/mswin/texmfstart.exe | Bin 0 -> 6144 bytes .../scripts/context/stubs/mswin/textools.bat | 5 - .../scripts/context/stubs/mswin/texutil.bat | 5 - .../scripts/context/stubs/mswin/tmftools.bat | 5 - .../scripts/context/stubs/mswin/xmltools.bat | 5 - .../scripts/context/stubs/source/mtxrun_dll.c | 221 + .../scripts/context/stubs/source/mtxrun_exe.c | 8 + .../scripts/context/stubs/source/readme.txt | 36 + .../texmf-dist/scripts/context/stubs/unix/ctxtools | 2 - .../texmf-dist/scripts/context/stubs/unix/luatools | 2232 +- .../texmf-dist/scripts/context/stubs/unix/makempy | 2 - .../texmf-dist/scripts/context/stubs/unix/mpstools | 2 - .../texmf-dist/scripts/context/stubs/unix/mptopdf | 2 - .../texmf-dist/scripts/context/stubs/unix/mtxrun | 7529 +- .../texmf-dist/scripts/context/stubs/unix/mtxtools | 2 - .../texmf-dist/scripts/context/stubs/unix/mtxworks | 2 - .../texmf-dist/scripts/context/stubs/unix/pdftools | 2 - .../texmf-dist/scripts/context/stubs/unix/pstopdf | 2 - .../texmf-dist/scripts/context/stubs/unix/rlxtools | 2 - .../texmf-dist/scripts/context/stubs/unix/runtools | 2 - .../texmf-dist/scripts/context/stubs/unix/texexec | 2 +- .../texmf-dist/scripts/context/stubs/unix/texfont | 2 - .../texmf-dist/scripts/context/stubs/unix/textools | 2 - .../texmf-dist/scripts/context/stubs/unix/texutil | 2 - .../texmf-dist/scripts/context/stubs/unix/tmftools | 2 - .../texmf-dist/scripts/context/stubs/unix/xmltools | 2 - Master/texmf-dist/tex/context/base/anch-bar.mkiv | 41 +- Master/texmf-dist/tex/context/base/anch-pgr.mkiv | 38 +- Master/texmf-dist/tex/context/base/anch-pos.lua | 109 
+- Master/texmf-dist/tex/context/base/anch-pos.mkiv | 24 +- Master/texmf-dist/tex/context/base/anch-snc.mkiv | 2 +- Master/texmf-dist/tex/context/base/attr-ini.lua | 157 +- Master/texmf-dist/tex/context/base/attr-ini.mkiv | 74 +- Master/texmf-dist/tex/context/base/back-ini.lua | 4 +- Master/texmf-dist/tex/context/base/back-ini.mkiv | 2 +- Master/texmf-dist/tex/context/base/back-pdf.lua | 36 +- Master/texmf-dist/tex/context/base/back-pdf.mkiv | 55 +- Master/texmf-dist/tex/context/base/back-u3d.mkiv | 156 + Master/texmf-dist/tex/context/base/bibl-bib.lua | 605 +- Master/texmf-dist/tex/context/base/bibl-bib.mkiv | 943 +- Master/texmf-dist/tex/context/base/bibl-tra.lua | 194 + Master/texmf-dist/tex/context/base/bibl-tra.mkii | 1778 + Master/texmf-dist/tex/context/base/bibl-tra.mkiv | 1552 + Master/texmf-dist/tex/context/base/bibl-tst.lua | 2 +- Master/texmf-dist/tex/context/base/blob-ini.lua | 165 + Master/texmf-dist/tex/context/base/blob-ini.mkiv | 34 + Master/texmf-dist/tex/context/base/buff-ini.lua | 382 +- Master/texmf-dist/tex/context/base/buff-ini.mkii | 8 +- Master/texmf-dist/tex/context/base/buff-ini.mkiv | 54 +- Master/texmf-dist/tex/context/base/buff-ver.mkii | 10 +- Master/texmf-dist/tex/context/base/buff-ver.mkiv | 290 +- Master/texmf-dist/tex/context/base/bxml-apa.mkiv | 613 + Master/texmf-dist/tex/context/base/catc-ctx.tex | 16 +- Master/texmf-dist/tex/context/base/catc-def.tex | 12 +- Master/texmf-dist/tex/context/base/catc-ini.lua | 9 +- Master/texmf-dist/tex/context/base/catc-ini.mkiv | 57 +- Master/texmf-dist/tex/context/base/char-act.mkiv | 13 + Master/texmf-dist/tex/context/base/char-cmp.lua | 3 +- Master/texmf-dist/tex/context/base/char-def.lua | 240 +- Master/texmf-dist/tex/context/base/char-enc.lua | 2 +- Master/texmf-dist/tex/context/base/char-ini.lua | 102 +- Master/texmf-dist/tex/context/base/char-ini.mkiv | 2 + Master/texmf-dist/tex/context/base/char-map.lua | 2 +- Master/texmf-dist/tex/context/base/char-tex.lua | 89 + Master/texmf-dist/tex/context/base/char-utf.lua | 2 +- Master/texmf-dist/tex/context/base/char-utf.mkiv | 2 +- Master/texmf-dist/tex/context/base/chem-ini.lua | 9 +- Master/texmf-dist/tex/context/base/chem-ini.mkiv | 2 +- Master/texmf-dist/tex/context/base/chem-str.lua | 24 +- Master/texmf-dist/tex/context/base/chem-str.mkiv | 15 +- Master/texmf-dist/tex/context/base/colo-ext.mkii | 10 +- Master/texmf-dist/tex/context/base/colo-ext.mkiv | 60 +- Master/texmf-dist/tex/context/base/colo-hex.mkii | 4 +- Master/texmf-dist/tex/context/base/colo-hex.mkiv | 2 + Master/texmf-dist/tex/context/base/colo-ini.lua | 255 +- Master/texmf-dist/tex/context/base/colo-ini.mkiv | 133 +- Master/texmf-dist/tex/context/base/colo-new.mkii | 1857 - Master/texmf-dist/tex/context/base/colo-new.mkiv | 331 - Master/texmf-dist/tex/context/base/colo-new.tex | 1051 - Master/texmf-dist/tex/context/base/colo-x11.tex | 677 + Master/texmf-dist/tex/context/base/cont-cz.tex | 35 - Master/texmf-dist/tex/context/base/cont-fil.tex | 3 +- Master/texmf-dist/tex/context/base/cont-log.tex | 66 +- Master/texmf-dist/tex/context/base/cont-new.mkii | 3 + Master/texmf-dist/tex/context/base/cont-new.mkiv | 116 +- Master/texmf-dist/tex/context/base/cont-new.tex | 2 +- Master/texmf-dist/tex/context/base/cont-uk.tex | 38 - .../texmf-dist/tex/context/base/context-base.lmx | 68 +- .../tex/context/base/context-characters.lmx | 2 +- .../texmf-dist/tex/context/base/context-debug.lmx | 2 +- .../texmf-dist/tex/context/base/context-error.lmx | 2 +- .../tex/context/base/context-fonttest.lmx | 2 +- 
.../texmf-dist/tex/context/base/context-help.lmx | 2 +- .../texmf-dist/tex/context/base/context-timing.lmx | 2 +- Master/texmf-dist/tex/context/base/context.mkii | 15 +- Master/texmf-dist/tex/context/base/context.mkiv | 54 +- Master/texmf-dist/tex/context/base/context.tex | 2 +- Master/texmf-dist/tex/context/base/core-bar.tex | 194 - Master/texmf-dist/tex/context/base/core-blk.tex | 676 - Master/texmf-dist/tex/context/base/core-box.tex | 953 - Master/texmf-dist/tex/context/base/core-con.lua | 176 +- Master/texmf-dist/tex/context/base/core-con.mkiv | 4 +- Master/texmf-dist/tex/context/base/core-ctx.lua | 25 +- Master/texmf-dist/tex/context/base/core-ctx.mkiv | 4 +- Master/texmf-dist/tex/context/base/core-def.mkii | 21 + Master/texmf-dist/tex/context/base/core-def.mkiv | 38 +- Master/texmf-dist/tex/context/base/core-env.mkiv | 170 +- Master/texmf-dist/tex/context/base/core-fil.mkiv | 55 +- Master/texmf-dist/tex/context/base/core-fnt.mkiv | 183 +- Master/texmf-dist/tex/context/base/core-grd.mkii | 1074 - Master/texmf-dist/tex/context/base/core-grd.mkiv | 1074 - Master/texmf-dist/tex/context/base/core-ini.mkiv | 2 +- Master/texmf-dist/tex/context/base/core-job.lua | 52 +- Master/texmf-dist/tex/context/base/core-job.mkii | 5 + Master/texmf-dist/tex/context/base/core-job.mkiv | 58 +- Master/texmf-dist/tex/context/base/core-mis.mkiv | 121 +- Master/texmf-dist/tex/context/base/core-spa.lua | 720 - Master/texmf-dist/tex/context/base/core-spa.mkii | 4658 -- Master/texmf-dist/tex/context/base/core-spa.mkiv | 4277 -- Master/texmf-dist/tex/context/base/core-sys.lua | 22 + Master/texmf-dist/tex/context/base/core-sys.mkiv | 149 +- Master/texmf-dist/tex/context/base/core-two.lua | 8 +- Master/texmf-dist/tex/context/base/core-two.mkiv | 2 +- Master/texmf-dist/tex/context/base/core-uti.lua | 33 +- Master/texmf-dist/tex/context/base/core-uti.mkiv | 2 + Master/texmf-dist/tex/context/base/core-var.mkiv | 15 +- Master/texmf-dist/tex/context/base/data-aux.lua | 20 +- Master/texmf-dist/tex/context/base/data-bin.lua | 9 +- Master/texmf-dist/tex/context/base/data-con.lua | 6 +- Master/texmf-dist/tex/context/base/data-crl.lua | 10 +- Master/texmf-dist/tex/context/base/data-ctx.lua | 6 +- Master/texmf-dist/tex/context/base/data-gen.lua | 2 +- Master/texmf-dist/tex/context/base/data-inp.lua | 2 +- Master/texmf-dist/tex/context/base/data-lst.lua | 15 +- Master/texmf-dist/tex/context/base/data-lua.lua | 125 +- Master/texmf-dist/tex/context/base/data-out.lua | 2 +- Master/texmf-dist/tex/context/base/data-pre.lua | 25 +- Master/texmf-dist/tex/context/base/data-res.lua | 533 +- Master/texmf-dist/tex/context/base/data-sch.lua | 142 + Master/texmf-dist/tex/context/base/data-tex.lua | 37 +- Master/texmf-dist/tex/context/base/data-tmf.lua | 43 +- Master/texmf-dist/tex/context/base/data-tmp.lua | 8 +- Master/texmf-dist/tex/context/base/data-tre.lua | 12 +- Master/texmf-dist/tex/context/base/data-use.lua | 28 +- Master/texmf-dist/tex/context/base/data-zip.lua | 78 +- Master/texmf-dist/tex/context/base/enco-def.mkii | 2 + Master/texmf-dist/tex/context/base/enco-ffr.mkii | 4 + Master/texmf-dist/tex/context/base/enco-ini.mkii | 2 +- Master/texmf-dist/tex/context/base/enco-ini.mkiv | 144 +- Master/texmf-dist/tex/context/base/enco-l7x.mkii | 236 + Master/texmf-dist/tex/context/base/font-afm.lua | 133 +- Master/texmf-dist/tex/context/base/font-agl.lua | 3695 + Master/texmf-dist/tex/context/base/font-chk.lua | 16 +- Master/texmf-dist/tex/context/base/font-cid.lua | 13 +- Master/texmf-dist/tex/context/base/font-col.lua | 5 +- 
Master/texmf-dist/tex/context/base/font-col.mkiv | 4 +- Master/texmf-dist/tex/context/base/font-ctx.lua | 314 +- Master/texmf-dist/tex/context/base/font-def.lua | 140 +- Master/texmf-dist/tex/context/base/font-dum.lua | 221 +- Master/texmf-dist/tex/context/base/font-enc.lua | 4 +- Master/texmf-dist/tex/context/base/font-enh.lua | 201 + Master/texmf-dist/tex/context/base/font-ext.lua | 440 +- Master/texmf-dist/tex/context/base/font-fbk.lua | 2 +- Master/texmf-dist/tex/context/base/font-gds.lua | 294 + Master/texmf-dist/tex/context/base/font-gds.mkiv | 83 + Master/texmf-dist/tex/context/base/font-ini.lua | 58 +- Master/texmf-dist/tex/context/base/font-ini.mkii | 30 +- Master/texmf-dist/tex/context/base/font-ini.mkiv | 691 +- Master/texmf-dist/tex/context/base/font-log.lua | 4 +- Master/texmf-dist/tex/context/base/font-map.lua | 390 +- Master/texmf-dist/tex/context/base/font-mis.lua | 28 +- Master/texmf-dist/tex/context/base/font-ota.lua | 38 +- Master/texmf-dist/tex/context/base/font-otb.lua | 228 +- Master/texmf-dist/tex/context/base/font-otc.lua | 29 +- Master/texmf-dist/tex/context/base/font-otd.lua | 2 +- Master/texmf-dist/tex/context/base/font-otf.lua | 654 +- Master/texmf-dist/tex/context/base/font-oti.lua | 2 +- Master/texmf-dist/tex/context/base/font-otn.lua | 630 +- Master/texmf-dist/tex/context/base/font-otp.lua | 156 +- Master/texmf-dist/tex/context/base/font-ott.lua | 19 +- Master/texmf-dist/tex/context/base/font-pat.lua | 36 +- Master/texmf-dist/tex/context/base/font-run.mkiv | 45 +- Master/texmf-dist/tex/context/base/font-syn.lua | 1273 +- Master/texmf-dist/tex/context/base/font-tfm.lua | 455 +- Master/texmf-dist/tex/context/base/font-tra.mkiv | 8 +- Master/texmf-dist/tex/context/base/font-vf.lua | 2 +- Master/texmf-dist/tex/context/base/font-xtx.lua | 27 +- Master/texmf-dist/tex/context/base/font-xtx.mkii | 49 +- Master/texmf-dist/tex/context/base/grph-fig.mkiv | 160 +- Master/texmf-dist/tex/context/base/grph-fil.lua | 42 + Master/texmf-dist/tex/context/base/grph-inc.lua | 530 +- Master/texmf-dist/tex/context/base/grph-inc.mkii | 5 +- Master/texmf-dist/tex/context/base/grph-inc.mkiv | 29 +- Master/texmf-dist/tex/context/base/grph-swf.lua | 43 + Master/texmf-dist/tex/context/base/grph-trf.mkii | 4 +- Master/texmf-dist/tex/context/base/grph-trf.mkiv | 21 +- Master/texmf-dist/tex/context/base/grph-u3d.lua | 48 +- Master/texmf-dist/tex/context/base/hand-ini.mkiv | 34 +- Master/texmf-dist/tex/context/base/java-ini.lua | 13 +- Master/texmf-dist/tex/context/base/l-aux.lua | 108 +- Master/texmf-dist/tex/context/base/l-boolean.lua | 2 +- Master/texmf-dist/tex/context/base/l-dimen.lua | 109 +- Master/texmf-dist/tex/context/base/l-dir.lua | 152 +- Master/texmf-dist/tex/context/base/l-file.lua | 148 +- Master/texmf-dist/tex/context/base/l-io.lua | 17 +- Master/texmf-dist/tex/context/base/l-lpeg.lua | 157 +- Master/texmf-dist/tex/context/base/l-math.lua | 2 +- Master/texmf-dist/tex/context/base/l-md5.lua | 2 +- Master/texmf-dist/tex/context/base/l-number.lua | 23 +- Master/texmf-dist/tex/context/base/l-os.lua | 305 +- Master/texmf-dist/tex/context/base/l-pdfview.lua | 95 + Master/texmf-dist/tex/context/base/l-set.lua | 2 +- Master/texmf-dist/tex/context/base/l-string.lua | 83 +- Master/texmf-dist/tex/context/base/l-table.lua | 163 +- Master/texmf-dist/tex/context/base/l-unicode.lua | 39 +- Master/texmf-dist/tex/context/base/l-url.lua | 49 +- Master/texmf-dist/tex/context/base/l-utils.lua | 30 +- Master/texmf-dist/tex/context/base/l-xml.lua | 2442 +- 
Master/texmf-dist/tex/context/base/lang-alt.tex | 91 +- Master/texmf-dist/tex/context/base/lang-bal.tex | 108 +- Master/texmf-dist/tex/context/base/lang-cjk.mkiv | 14 +- Master/texmf-dist/tex/context/base/lang-ger.tex | 9 + Master/texmf-dist/tex/context/base/lang-ini.lua | 281 +- Master/texmf-dist/tex/context/base/lang-ini.mkii | 42 +- Master/texmf-dist/tex/context/base/lang-ini.mkiv | 54 +- Master/texmf-dist/tex/context/base/lang-ita.tex | 52 +- Master/texmf-dist/tex/context/base/lang-lab.mkiv | 76 +- Master/texmf-dist/tex/context/base/lang-mis.mkiv | 8 +- Master/texmf-dist/tex/context/base/lang-sla.tex | 14 +- Master/texmf-dist/tex/context/base/lang-url.lua | 136 +- Master/texmf-dist/tex/context/base/lang-url.mkiv | 13 +- Master/texmf-dist/tex/context/base/lang-wrd.lua | 225 + Master/texmf-dist/tex/context/base/lang-wrd.mkiv | 54 + Master/texmf-dist/tex/context/base/lpdf-ano.lua | 140 +- Master/texmf-dist/tex/context/base/lpdf-col.lua | 2 +- Master/texmf-dist/tex/context/base/lpdf-fld.lua | 334 +- Master/texmf-dist/tex/context/base/lpdf-grp.lua | 18 +- Master/texmf-dist/tex/context/base/lpdf-ini.lua | 461 +- Master/texmf-dist/tex/context/base/lpdf-ini.mkiv | 15 +- Master/texmf-dist/tex/context/base/lpdf-mis.lua | 133 +- Master/texmf-dist/tex/context/base/lpdf-nod.lua | 8 +- Master/texmf-dist/tex/context/base/lpdf-pdx.lua | 139 + Master/texmf-dist/tex/context/base/lpdf-pdx.mkiv | 71 + Master/texmf-dist/tex/context/base/lpdf-ren.lua | 18 +- Master/texmf-dist/tex/context/base/lpdf-swf.lua | 109 + Master/texmf-dist/tex/context/base/lpdf-u3d.lua | 127 +- Master/texmf-dist/tex/context/base/lpdf-wid.lua | 95 +- Master/texmf-dist/tex/context/base/lpdf-xmp.lua | 165 + Master/texmf-dist/tex/context/base/lpdf-xmp.xml | 43 + Master/texmf-dist/tex/context/base/luat-cbk.lua | 128 +- Master/texmf-dist/tex/context/base/luat-cnf.lua | 18 +- Master/texmf-dist/tex/context/base/luat-cod.mkiv | 2 +- Master/texmf-dist/tex/context/base/luat-dum.lua | 69 +- Master/texmf-dist/tex/context/base/luat-env.lua | 78 +- Master/texmf-dist/tex/context/base/luat-exe.lua | 21 +- Master/texmf-dist/tex/context/base/luat-fio.lua | 6 +- Master/texmf-dist/tex/context/base/luat-ini.lua | 16 +- Master/texmf-dist/tex/context/base/luat-ini.mkiv | 18 +- Master/texmf-dist/tex/context/base/luat-iop.lua | 167 +- Master/texmf-dist/tex/context/base/luat-lib.mkiv | 9 +- Master/texmf-dist/tex/context/base/luat-lua.lua | 5 +- Master/texmf-dist/tex/context/base/luat-run.lua | 19 +- Master/texmf-dist/tex/context/base/luat-sta.lua | 13 +- Master/texmf-dist/tex/context/base/luat-sto.lua | 12 +- Master/texmf-dist/tex/context/base/lxml-aux.lua | 543 + Master/texmf-dist/tex/context/base/lxml-ctx.lua | 131 + Master/texmf-dist/tex/context/base/lxml-ctx.mkiv | 64 + Master/texmf-dist/tex/context/base/lxml-dir.lua | 112 + Master/texmf-dist/tex/context/base/lxml-ent.lua | 124 +- Master/texmf-dist/tex/context/base/lxml-inf.lua | 53 + Master/texmf-dist/tex/context/base/lxml-ini.lua | 1138 - Master/texmf-dist/tex/context/base/lxml-ini.mkiv | 360 +- Master/texmf-dist/tex/context/base/lxml-lpt.lua | 1259 + Master/texmf-dist/tex/context/base/lxml-mis.lua | 57 +- Master/texmf-dist/tex/context/base/lxml-pth.lua | 1555 - Master/texmf-dist/tex/context/base/lxml-sor.lua | 158 + Master/texmf-dist/tex/context/base/lxml-sor.mkiv | 94 + Master/texmf-dist/tex/context/base/lxml-tab.lua | 1025 +- Master/texmf-dist/tex/context/base/lxml-tex.lua | 1410 + Master/texmf-dist/tex/context/base/lxml-xml.lua | 288 + Master/texmf-dist/tex/context/base/m-barcodes.mkiv | 122 + 
Master/texmf-dist/tex/context/base/m-chart.mkii | 48 + Master/texmf-dist/tex/context/base/m-chart.mkiv | 20 + Master/texmf-dist/tex/context/base/m-chart.tex | 36 +- Master/texmf-dist/tex/context/base/m-chemic.mkiv | 5 +- Master/texmf-dist/tex/context/base/m-database.tex | 60 +- .../texmf-dist/tex/context/base/m-directives.tex | 5 + Master/texmf-dist/tex/context/base/m-graph.mkiv | 2 +- Master/texmf-dist/tex/context/base/m-narrowtt.tex | 2 +- Master/texmf-dist/tex/context/base/m-obsolete.tex | 5 + Master/texmf-dist/tex/context/base/m-pstric.tex | 113 - Master/texmf-dist/tex/context/base/m-pstricks.lua | 73 + Master/texmf-dist/tex/context/base/m-pstricks.mkii | 127 + Master/texmf-dist/tex/context/base/m-pstricks.mkiv | 66 + Master/texmf-dist/tex/context/base/m-pstricks.tex | 16 + Master/texmf-dist/tex/context/base/m-punk.mkiv | 243 + Master/texmf-dist/tex/context/base/m-punk.tex | 233 +- Master/texmf-dist/tex/context/base/m-steps.mkii | 83 + Master/texmf-dist/tex/context/base/m-steps.mkiv | 20 + Master/texmf-dist/tex/context/base/m-steps.tex | 69 - Master/texmf-dist/tex/context/base/m-timing.tex | 26 +- Master/texmf-dist/tex/context/base/m-track.tex | 5 - Master/texmf-dist/tex/context/base/m-trackers.tex | 5 + Master/texmf-dist/tex/context/base/m-translate.tex | 6 +- Master/texmf-dist/tex/context/base/math-ali.mkiv | 273 +- Master/texmf-dist/tex/context/base/math-arr.mkiv | 74 +- Master/texmf-dist/tex/context/base/math-def.mkiv | 128 +- Master/texmf-dist/tex/context/base/math-dim.lua | 17 +- Master/texmf-dist/tex/context/base/math-ent.lua | 2 +- Master/texmf-dist/tex/context/base/math-ext.lua | 37 +- Master/texmf-dist/tex/context/base/math-for.mkiv | 12 +- Master/texmf-dist/tex/context/base/math-ini.lua | 46 +- Master/texmf-dist/tex/context/base/math-ini.mkii | 5 +- Master/texmf-dist/tex/context/base/math-ini.mkiv | 199 +- Master/texmf-dist/tex/context/base/math-inl.mkiv | 339 +- Master/texmf-dist/tex/context/base/math-int.mkiv | 5 +- Master/texmf-dist/tex/context/base/math-lan.mkiv | 67 + Master/texmf-dist/tex/context/base/math-lbr.mkii | 67 +- Master/texmf-dist/tex/context/base/math-map.lua | 287 +- Master/texmf-dist/tex/context/base/math-noa.lua | 78 +- Master/texmf-dist/tex/context/base/math-pln.mkiv | 2 +- Master/texmf-dist/tex/context/base/math-run.mkii | 54 +- Master/texmf-dist/tex/context/base/math-scr.mkiv | 88 +- Master/texmf-dist/tex/context/base/math-tex.mkii | 2 + Master/texmf-dist/tex/context/base/math-vfu.lua | 880 +- Master/texmf-dist/tex/context/base/meta-dum.tex | 2 + Master/texmf-dist/tex/context/base/meta-fig.mkiv | 2 +- Master/texmf-dist/tex/context/base/meta-fun.lua | 2 +- Master/texmf-dist/tex/context/base/meta-ini.mkii | 8 + Master/texmf-dist/tex/context/base/meta-ini.mkiv | 133 +- Master/texmf-dist/tex/context/base/meta-pdf.lua | 20 +- Master/texmf-dist/tex/context/base/meta-pdh.lua | 58 +- Master/texmf-dist/tex/context/base/meta-pdh.mkiv | 2 +- Master/texmf-dist/tex/context/base/meta-tex.mkii | 11 +- Master/texmf-dist/tex/context/base/meta-tex.mkiv | 2 +- Master/texmf-dist/tex/context/base/meta-txt.tex | 24 +- Master/texmf-dist/tex/context/base/metatex.tex | 27 +- Master/texmf-dist/tex/context/base/mlib-ctx.lua | 39 +- Master/texmf-dist/tex/context/base/mlib-pdf.lua | 8 +- Master/texmf-dist/tex/context/base/mlib-pps.lua | 212 +- Master/texmf-dist/tex/context/base/mlib-pps.mkiv | 49 +- Master/texmf-dist/tex/context/base/mlib-run.lua | 19 +- .../tex/context/base/mtx-context-arrange.tex | 12 +- .../tex/context/base/mtx-context-combine.tex | 70 +- 
.../tex/context/base/mtx-context-common.tex | 43 + .../tex/context/base/mtx-context-ideas.tex | 2 - .../tex/context/base/mtx-context-listing.tex | 74 +- .../tex/context/base/mtx-context-timing.tex | 2 - Master/texmf-dist/tex/context/base/mult-chk.lua | 8 +- Master/texmf-dist/tex/context/base/mult-cld.lua | 170 +- Master/texmf-dist/tex/context/base/mult-cld.mkiv | 11 +- Master/texmf-dist/tex/context/base/mult-clm.lua | 184 + Master/texmf-dist/tex/context/base/mult-de.tex | 69 +- Master/texmf-dist/tex/context/base/mult-def.lua | 335 +- Master/texmf-dist/tex/context/base/mult-en.tex | 73 +- Master/texmf-dist/tex/context/base/mult-fr.tex | 69 +- Master/texmf-dist/tex/context/base/mult-ini.lua | 47 +- Master/texmf-dist/tex/context/base/mult-ini.mkiv | 62 +- Master/texmf-dist/tex/context/base/mult-it.tex | 69 +- Master/texmf-dist/tex/context/base/mult-mcs.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mde.tex | 110 +- Master/texmf-dist/tex/context/base/mult-men.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mes.lua | 43 +- Master/texmf-dist/tex/context/base/mult-mfr.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mit.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mnl.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mno.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mpe.tex | 110 +- Master/texmf-dist/tex/context/base/mult-mro.tex | 110 +- Master/texmf-dist/tex/context/base/mult-nl.tex | 73 +- Master/texmf-dist/tex/context/base/mult-ro.tex | 69 +- Master/texmf-dist/tex/context/base/mult-sys.tex | 75 +- Master/texmf-dist/tex/context/base/node-aux.lua | 145 + Master/texmf-dist/tex/context/base/node-bck.lua | 5 +- Master/texmf-dist/tex/context/base/node-bck.mkiv | 4 +- Master/texmf-dist/tex/context/base/node-dir.lua | 308 + Master/texmf-dist/tex/context/base/node-ext.lua | 2 +- Master/texmf-dist/tex/context/base/node-fin.lua | 8 +- Master/texmf-dist/tex/context/base/node-fin.mkiv | 12 +- Master/texmf-dist/tex/context/base/node-fnt.lua | 11 +- Master/texmf-dist/tex/context/base/node-ini.lua | 50 +- Master/texmf-dist/tex/context/base/node-ini.mkiv | 30 +- Master/texmf-dist/tex/context/base/node-inj.lua | 135 +- Master/texmf-dist/tex/context/base/node-mig.lua | 120 + Master/texmf-dist/tex/context/base/node-mig.mkiv | 60 + Master/texmf-dist/tex/context/base/node-pag.lua | 30 + Master/texmf-dist/tex/context/base/node-pag.mkiv | 20 + Master/texmf-dist/tex/context/base/node-par.lua | 88 +- Master/texmf-dist/tex/context/base/node-par.mkiv | 40 +- Master/texmf-dist/tex/context/base/node-pro.lua | 32 +- Master/texmf-dist/tex/context/base/node-ref.lua | 34 +- Master/texmf-dist/tex/context/base/node-res.lua | 230 +- Master/texmf-dist/tex/context/base/node-rul.lua | 288 + Master/texmf-dist/tex/context/base/node-rul.mkiv | 343 + Master/texmf-dist/tex/context/base/node-seq.lua | 104 +- Master/texmf-dist/tex/context/base/node-ser.lua | 4 +- Master/texmf-dist/tex/context/base/node-shp.lua | 4 +- Master/texmf-dist/tex/context/base/node-tex.lua | 8 +- Master/texmf-dist/tex/context/base/node-tra.lua | 94 +- Master/texmf-dist/tex/context/base/node-tsk.lua | 167 +- Master/texmf-dist/tex/context/base/node-tst.lua | 10 +- Master/texmf-dist/tex/context/base/node-typ.lua | 53 + Master/texmf-dist/tex/context/base/pack-box.mkii | 16 +- Master/texmf-dist/tex/context/base/pack-box.mkiv | 31 +- Master/texmf-dist/tex/context/base/pack-lyr.mkiv | 20 +- Master/texmf-dist/tex/context/base/pack-obj.lua | 2 +- Master/texmf-dist/tex/context/base/pack-obj.mkiv | 3 +- 
Master/texmf-dist/tex/context/base/pack-rul.lua | 16 +- Master/texmf-dist/tex/context/base/pack-rul.mkiv | 273 +- Master/texmf-dist/tex/context/base/page-app.mkiv | 14 +- Master/texmf-dist/tex/context/base/page-bck.mkiv | 8 +- Master/texmf-dist/tex/context/base/page-flt.lua | 227 + Master/texmf-dist/tex/context/base/page-flt.mkiv | 233 + Master/texmf-dist/tex/context/base/page-flw.mkiv | 6 +- Master/texmf-dist/tex/context/base/page-imp.mkiv | 15 +- Master/texmf-dist/tex/context/base/page-ini.mkii | 2 +- Master/texmf-dist/tex/context/base/page-ini.mkiv | 164 +- Master/texmf-dist/tex/context/base/page-lay.mkiv | 69 +- Master/texmf-dist/tex/context/base/page-lin.lua | 238 +- Master/texmf-dist/tex/context/base/page-lin.mkiv | 286 +- Master/texmf-dist/tex/context/base/page-mak.mkiv | 14 +- Master/texmf-dist/tex/context/base/page-mar.mkiv | 17 +- Master/texmf-dist/tex/context/base/page-mis.mkiv | 127 +- Master/texmf-dist/tex/context/base/page-mul.mkiv | 180 +- Master/texmf-dist/tex/context/base/page-not.mkiv | 11 +- Master/texmf-dist/tex/context/base/page-one.mkiv | 172 +- Master/texmf-dist/tex/context/base/page-par.mkiv | 2 +- Master/texmf-dist/tex/context/base/page-plg.mkiv | 4 +- Master/texmf-dist/tex/context/base/page-set.mkii | 369 +- Master/texmf-dist/tex/context/base/page-set.mkiv | 506 +- Master/texmf-dist/tex/context/base/page-sid.mkiv | 99 +- Master/texmf-dist/tex/context/base/page-spr.mkiv | 4 +- Master/texmf-dist/tex/context/base/page-str.lua | 221 + Master/texmf-dist/tex/context/base/page-str.mkiv | 438 +- Master/texmf-dist/tex/context/base/page-txt.mkiv | 121 +- Master/texmf-dist/tex/context/base/ppchtex.mkii | 101 +- Master/texmf-dist/tex/context/base/ppchtex.mkiv | 113 +- Master/texmf-dist/tex/context/base/pret-lua.lua | 147 +- Master/texmf-dist/tex/context/base/pret-mp.lua | 6 +- Master/texmf-dist/tex/context/base/pret-tex.lua | 6 +- Master/texmf-dist/tex/context/base/pret-xml.lua | 136 + Master/texmf-dist/tex/context/base/prop-ini.mkiv | 22 +- Master/texmf-dist/tex/context/base/prop-lay.mkiv | 6 +- Master/texmf-dist/tex/context/base/prop-mis.mkiv | 46 +- Master/texmf-dist/tex/context/base/regi-8859-1.lua | 46 +- .../texmf-dist/tex/context/base/regi-8859-10.lua | 46 +- .../texmf-dist/tex/context/base/regi-8859-11.lua | 46 +- .../texmf-dist/tex/context/base/regi-8859-13.lua | 46 +- .../texmf-dist/tex/context/base/regi-8859-14.lua | 46 +- .../texmf-dist/tex/context/base/regi-8859-15.lua | 46 +- .../texmf-dist/tex/context/base/regi-8859-16.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-2.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-3.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-4.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-5.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-6.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-7.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-8.lua | 46 +- Master/texmf-dist/tex/context/base/regi-8859-9.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1250.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1251.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1252.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1253.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1254.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1255.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1256.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1257.lua | 46 +- Master/texmf-dist/tex/context/base/regi-cp1258.lua | 46 +- Master/texmf-dist/tex/context/base/regi-ini.lua 
| 8 +- Master/texmf-dist/tex/context/base/regi-ini.mkiv | 2 +- Master/texmf-dist/tex/context/base/regi-utf.mkii | 1 + Master/texmf-dist/tex/context/base/s-abr-01.tex | 7 +- Master/texmf-dist/tex/context/base/s-chi-00.tex | 2 +- Master/texmf-dist/tex/context/base/s-fnt-10.tex | 1 + Master/texmf-dist/tex/context/base/s-fnt-25.tex | 40 +- Master/texmf-dist/tex/context/base/s-fnt-26.tex | 73 + Master/texmf-dist/tex/context/base/s-inf-01.tex | 203 + Master/texmf-dist/tex/context/base/s-inf-02.tex | 27 + Master/texmf-dist/tex/context/base/s-map-10.tex | 3 +- Master/texmf-dist/tex/context/base/s-mod-00.tex | 11 +- Master/texmf-dist/tex/context/base/s-mod-01.tex | 17 +- Master/texmf-dist/tex/context/base/s-mod.ctx | 24 + Master/texmf-dist/tex/context/base/s-pre-60.tex | 2 +- Master/texmf-dist/tex/context/base/s-pre-64.tex | 2 +- Master/texmf-dist/tex/context/base/s-ptj-01.tex | 166 +- Master/texmf-dist/tex/context/base/scrn-bar.mkiv | 4 +- Master/texmf-dist/tex/context/base/scrn-but.mkiv | 5 +- Master/texmf-dist/tex/context/base/scrn-fld.mkii | 2 + Master/texmf-dist/tex/context/base/scrn-fld.mkiv | 105 +- Master/texmf-dist/tex/context/base/scrn-hlp.mkiv | 46 +- Master/texmf-dist/tex/context/base/scrn-int.lua | 2 +- Master/texmf-dist/tex/context/base/scrn-int.mkii | 24 +- Master/texmf-dist/tex/context/base/scrn-int.mkiv | 67 +- Master/texmf-dist/tex/context/base/scrn-men.mkiv | 19 +- Master/texmf-dist/tex/context/base/scrn-nav.mkiv | 28 +- Master/texmf-dist/tex/context/base/scrp-cjk.lua | 13 +- Master/texmf-dist/tex/context/base/scrp-ini.lua | 4 +- Master/texmf-dist/tex/context/base/scrp-ini.mkiv | 16 +- Master/texmf-dist/tex/context/base/sort-ini.lua | 320 +- Master/texmf-dist/tex/context/base/sort-lan.lua | 402 +- Master/texmf-dist/tex/context/base/sort-lan.mkii | 74 +- Master/texmf-dist/tex/context/base/spac-ali.mkiv | 677 + Master/texmf-dist/tex/context/base/spac-def.mkiv | 118 + Master/texmf-dist/tex/context/base/spac-fnt.mkiv | 94 + Master/texmf-dist/tex/context/base/spac-gen.mkii | 4652 ++ Master/texmf-dist/tex/context/base/spac-grd.mkii | 1074 + Master/texmf-dist/tex/context/base/spac-grd.mkiv | 259 + Master/texmf-dist/tex/context/base/spac-hor.lua | 32 + Master/texmf-dist/tex/context/base/spac-hor.mkiv | 946 + Master/texmf-dist/tex/context/base/spac-pag.mkiv | 199 + Master/texmf-dist/tex/context/base/spac-par.mkiv | 192 + Master/texmf-dist/tex/context/base/spac-ver.lua | 1185 + Master/texmf-dist/tex/context/base/spac-ver.mkiv | 2078 + Master/texmf-dist/tex/context/base/spec-dpm.mkii | 3 +- Master/texmf-dist/tex/context/base/spec-dpx.mkii | 3 +- Master/texmf-dist/tex/context/base/spec-fdf.mkii | 229 +- Master/texmf-dist/tex/context/base/spec-tpd.mkii | 2 +- Master/texmf-dist/tex/context/base/spec-tst.mkii | 102 +- Master/texmf-dist/tex/context/base/strc-bkm.lua | 52 +- Master/texmf-dist/tex/context/base/strc-bkm.mkiv | 5 +- Master/texmf-dist/tex/context/base/strc-blk.lua | 17 +- Master/texmf-dist/tex/context/base/strc-blk.mkiv | 18 +- Master/texmf-dist/tex/context/base/strc-def.mkiv | 19 +- Master/texmf-dist/tex/context/base/strc-des.lua | 2 +- Master/texmf-dist/tex/context/base/strc-des.mkiv | 390 +- Master/texmf-dist/tex/context/base/strc-doc.lua | 163 +- Master/texmf-dist/tex/context/base/strc-doc.mkiv | 84 +- Master/texmf-dist/tex/context/base/strc-flt.lua | 2 +- Master/texmf-dist/tex/context/base/strc-flt.mkiv | 409 +- Master/texmf-dist/tex/context/base/strc-ini.lua | 42 +- Master/texmf-dist/tex/context/base/strc-ini.mkiv | 8 +- Master/texmf-dist/tex/context/base/strc-itm.lua | 2 +- 
Master/texmf-dist/tex/context/base/strc-itm.mkii | 33 +- Master/texmf-dist/tex/context/base/strc-itm.mkiv | 347 +- Master/texmf-dist/tex/context/base/strc-lnt.mkiv | 54 +- Master/texmf-dist/tex/context/base/strc-lst.lua | 255 +- Master/texmf-dist/tex/context/base/strc-lst.mkiv | 91 +- Master/texmf-dist/tex/context/base/strc-mar.lua | 2 +- Master/texmf-dist/tex/context/base/strc-mar.mkiv | 14 +- Master/texmf-dist/tex/context/base/strc-mat.lua | 2 +- Master/texmf-dist/tex/context/base/strc-mat.mkiv | 962 +- Master/texmf-dist/tex/context/base/strc-not.lua | 75 +- Master/texmf-dist/tex/context/base/strc-not.mkii | 32 + Master/texmf-dist/tex/context/base/strc-not.mkiv | 405 +- Master/texmf-dist/tex/context/base/strc-num.lua | 98 +- Master/texmf-dist/tex/context/base/strc-num.mkiv | 110 +- Master/texmf-dist/tex/context/base/strc-pag.lua | 110 +- Master/texmf-dist/tex/context/base/strc-pag.mkiv | 97 +- Master/texmf-dist/tex/context/base/strc-prc.lua | 2 +- Master/texmf-dist/tex/context/base/strc-prc.mkiv | 2 +- Master/texmf-dist/tex/context/base/strc-ref.lua | 641 +- Master/texmf-dist/tex/context/base/strc-ref.mkii | 3 + Master/texmf-dist/tex/context/base/strc-ref.mkiv | 290 +- Master/texmf-dist/tex/context/base/strc-reg.lua | 245 +- Master/texmf-dist/tex/context/base/strc-reg.mkii | 3 +- Master/texmf-dist/tex/context/base/strc-reg.mkiv | 282 +- Master/texmf-dist/tex/context/base/strc-ren.mkiv | 95 +- Master/texmf-dist/tex/context/base/strc-sbe.mkiv | 11 +- Master/texmf-dist/tex/context/base/strc-sec.mkiv | 244 +- Master/texmf-dist/tex/context/base/strc-syn.lua | 42 +- Master/texmf-dist/tex/context/base/strc-syn.mkiv | 103 +- Master/texmf-dist/tex/context/base/strc-xml.mkiv | 18 +- Master/texmf-dist/tex/context/base/supp-box.tex | 7 + Master/texmf-dist/tex/context/base/supp-dir.mkiv | 23 + Master/texmf-dist/tex/context/base/supp-fil.lua | 201 +- Master/texmf-dist/tex/context/base/supp-fil.mkii | 2 + Master/texmf-dist/tex/context/base/supp-fil.mkiv | 199 +- Master/texmf-dist/tex/context/base/supp-mat.mkii | 293 + Master/texmf-dist/tex/context/base/supp-mat.mkiv | 293 + Master/texmf-dist/tex/context/base/supp-mat.tex | 293 - Master/texmf-dist/tex/context/base/supp-mis.mkii | 27 +- Master/texmf-dist/tex/context/base/supp-mis.tex | 9 +- Master/texmf-dist/tex/context/base/supp-mpe.tex | 9 +- Master/texmf-dist/tex/context/base/supp-mps.mkii | 3 +- Master/texmf-dist/tex/context/base/supp-num.tex | 23 +- Master/texmf-dist/tex/context/base/supp-pdf.tex | 3 +- Master/texmf-dist/tex/context/base/supp-ran.lua | 31 +- Master/texmf-dist/tex/context/base/supp-vis.tex | 11 +- Master/texmf-dist/tex/context/base/symb-ini.mkiv | 10 +- Master/texmf-dist/tex/context/base/syst-aux.mkiv | 246 +- Master/texmf-dist/tex/context/base/syst-con.lua | 2 +- Master/texmf-dist/tex/context/base/syst-ext.mkii | 29 +- Master/texmf-dist/tex/context/base/syst-gen.mkii | 10 +- Master/texmf-dist/tex/context/base/syst-ini.tex | 17 +- Master/texmf-dist/tex/context/base/syst-ltx.tex | 56 + Master/texmf-dist/tex/context/base/syst-lua.lua | 46 +- Master/texmf-dist/tex/context/base/tabl-com.mkii | 21 + Master/texmf-dist/tex/context/base/tabl-com.mkiv | 21 + Master/texmf-dist/tex/context/base/tabl-ltb.mkii | 2 +- Master/texmf-dist/tex/context/base/tabl-ltb.mkiv | 30 +- Master/texmf-dist/tex/context/base/tabl-ntb.mkii | 2 + Master/texmf-dist/tex/context/base/tabl-ntb.mkiv | 100 +- Master/texmf-dist/tex/context/base/tabl-tab.mkiv | 81 +- Master/texmf-dist/tex/context/base/tabl-tbl.mkii | 3 +- Master/texmf-dist/tex/context/base/tabl-tbl.mkiv | 
108 +- Master/texmf-dist/tex/context/base/tabl-tsp.mkiv | 6 +- Master/texmf-dist/tex/context/base/task-ini.lua | 101 +- Master/texmf-dist/tex/context/base/toks-ini.lua | 12 +- Master/texmf-dist/tex/context/base/toks-ini.mkiv | 4 +- Master/texmf-dist/tex/context/base/trac-deb.lua | 73 +- Master/texmf-dist/tex/context/base/trac-deb.mkiv | 16 +- Master/texmf-dist/tex/context/base/trac-inf.lua | 36 +- Master/texmf-dist/tex/context/base/trac-lmx.lua | 263 +- Master/texmf-dist/tex/context/base/trac-log.lua | 70 +- Master/texmf-dist/tex/context/base/trac-tex.mkiv | 19 + Master/texmf-dist/tex/context/base/trac-tim.lua | 101 +- Master/texmf-dist/tex/context/base/trac-tra.lua | 180 +- Master/texmf-dist/tex/context/base/type-buy.mkii | 571 + Master/texmf-dist/tex/context/base/type-buy.mkiv | 82 + Master/texmf-dist/tex/context/base/type-buy.tex | 791 - Master/texmf-dist/tex/context/base/type-cbg.mkii | 352 + Master/texmf-dist/tex/context/base/type-cbg.tex | 352 - Master/texmf-dist/tex/context/base/type-cow.mkii | 85 + Master/texmf-dist/tex/context/base/type-cow.mkiv | 85 + Master/texmf-dist/tex/context/base/type-cow.tex | 85 - Master/texmf-dist/tex/context/base/type-def.mkii | 86 + Master/texmf-dist/tex/context/base/type-def.mkiv | 48 + Master/texmf-dist/tex/context/base/type-def.tex | 128 - Master/texmf-dist/tex/context/base/type-exp.mkii | 95 + Master/texmf-dist/tex/context/base/type-exp.tex | 95 - Master/texmf-dist/tex/context/base/type-fsf.mkii | 94 + Master/texmf-dist/tex/context/base/type-fsf.tex | 94 - Master/texmf-dist/tex/context/base/type-ghz.mkii | 409 + Master/texmf-dist/tex/context/base/type-ghz.mkiv | 322 + Master/texmf-dist/tex/context/base/type-ghz.tex | 409 - Master/texmf-dist/tex/context/base/type-hgz.mkii | 1 + Master/texmf-dist/tex/context/base/type-hgz.mkiv | 1 + Master/texmf-dist/tex/context/base/type-hgz.tex | 322 - .../tex/context/base/type-husayni-default.mkiv | 146 + Master/texmf-dist/tex/context/base/type-ini.mkii | 19 +- Master/texmf-dist/tex/context/base/type-ini.mkiv | 296 +- Master/texmf-dist/tex/context/base/type-lua.mkiv | 88 + Master/texmf-dist/tex/context/base/type-mac.mkii | 373 + Master/texmf-dist/tex/context/base/type-mac.mkiv | 373 + Master/texmf-dist/tex/context/base/type-mac.tex | 434 - Master/texmf-dist/tex/context/base/type-msw.mkii | 1 + Master/texmf-dist/tex/context/base/type-msw.mkiv | 1 + Master/texmf-dist/tex/context/base/type-msw.tex | 69 - Master/texmf-dist/tex/context/base/type-one.mkii | 3867 +- Master/texmf-dist/tex/context/base/type-one.mkiv | 99 +- Master/texmf-dist/tex/context/base/type-one.tex | 16 - Master/texmf-dist/tex/context/base/type-otf.mkii | 2426 +- Master/texmf-dist/tex/context/base/type-otf.mkiv | 2162 +- Master/texmf-dist/tex/context/base/type-otf.tex | 155 - Master/texmf-dist/tex/context/base/type-pre.mkii | 253 + Master/texmf-dist/tex/context/base/type-pre.tex | 253 - Master/texmf-dist/tex/context/base/type-run.mkiv | 4 +- Master/texmf-dist/tex/context/base/type-set.mkii | 81 + Master/texmf-dist/tex/context/base/type-set.mkiv | 77 + Master/texmf-dist/tex/context/base/type-siz.mkii | 157 + Master/texmf-dist/tex/context/base/type-siz.mkiv | 159 + Master/texmf-dist/tex/context/base/type-siz.tex | 179 - Master/texmf-dist/tex/context/base/type-tmf.mkii | 1147 - Master/texmf-dist/tex/context/base/type-tmf.mkiv | 919 - Master/texmf-dist/tex/context/base/type-tmf.tex | 16 - Master/texmf-dist/tex/context/base/type-win.mkii | 69 + Master/texmf-dist/tex/context/base/type-win.mkiv | 118 + Master/texmf-dist/tex/context/base/type-win.tex | 120 - 
Master/texmf-dist/tex/context/base/type-xtx.mkii | 563 + Master/texmf-dist/tex/context/base/type-xtx.tex | 564 - Master/texmf-dist/tex/context/base/typo-brk.lua | 119 +- Master/texmf-dist/tex/context/base/typo-brk.mkiv | 46 +- Master/texmf-dist/tex/context/base/typo-cap.lua | 14 +- Master/texmf-dist/tex/context/base/typo-cap.mkiv | 22 +- Master/texmf-dist/tex/context/base/typo-dig.lua | 106 + Master/texmf-dist/tex/context/base/typo-dig.mkiv | 51 + Master/texmf-dist/tex/context/base/typo-krn.lua | 84 +- Master/texmf-dist/tex/context/base/typo-krn.mkiv | 60 +- Master/texmf-dist/tex/context/base/typo-mir.lua | 63 +- Master/texmf-dist/tex/context/base/typo-mir.mkiv | 14 +- Master/texmf-dist/tex/context/base/typo-rep.lua | 135 + Master/texmf-dist/tex/context/base/typo-rep.mkiv | 53 + Master/texmf-dist/tex/context/base/typo-spa.lua | 27 +- Master/texmf-dist/tex/context/base/typo-spa.mkiv | 12 +- Master/texmf-dist/tex/context/base/unic-031.mkii | 10 - Master/texmf-dist/tex/context/base/verb-eif.mkii | 2 +- Master/texmf-dist/tex/context/base/verb-ini.mkii | 25 +- Master/texmf-dist/tex/context/base/verb-js.mkii | 2 +- Master/texmf-dist/tex/context/base/verb-jv.mkii | 2 +- Master/texmf-dist/tex/context/base/verb-pas.mkii | 2 +- Master/texmf-dist/tex/context/base/verb-raw.mkii | 6 +- Master/texmf-dist/tex/context/base/verb-sql.mkii | 2 +- Master/texmf-dist/tex/context/base/x-asciimath.lua | 267 + .../texmf-dist/tex/context/base/x-asciimath.mkiv | 96 + Master/texmf-dist/tex/context/base/x-calcmath.lua | 289 +- Master/texmf-dist/tex/context/base/x-calcmath.mkiv | 2 - Master/texmf-dist/tex/context/base/x-cals.lua | 209 + Master/texmf-dist/tex/context/base/x-cals.mkiv | 207 +- Master/texmf-dist/tex/context/base/x-chemml.mkiv | 2 +- Master/texmf-dist/tex/context/base/x-contml.mkii | 491 + Master/texmf-dist/tex/context/base/x-contml.tex | 493 - Master/texmf-dist/tex/context/base/x-corres.mkii | 136 + Master/texmf-dist/tex/context/base/x-corres.tex | 136 - Master/texmf-dist/tex/context/base/x-ct.lua | 182 + Master/texmf-dist/tex/context/base/x-ct.mkiv | 179 +- Master/texmf-dist/tex/context/base/x-dir-05.mkii | 51 + Master/texmf-dist/tex/context/base/x-dir-05.mkiv | 72 + Master/texmf-dist/tex/context/base/x-fe.mkii | 143 + Master/texmf-dist/tex/context/base/x-fe.tex | 143 - Master/texmf-dist/tex/context/base/x-fig-03.tex | 10 +- Master/texmf-dist/tex/context/base/x-fo.mkii | 4059 ++ Master/texmf-dist/tex/context/base/x-fo.tex | 4063 -- Master/texmf-dist/tex/context/base/x-foxet.mkii | 28 + Master/texmf-dist/tex/context/base/x-foxet.mkiv | 29 + Master/texmf-dist/tex/context/base/x-foxet.tex | 15 - Master/texmf-dist/tex/context/base/x-ldx.tex | 20 +- Master/texmf-dist/tex/context/base/x-mathml.lua | 331 +- Master/texmf-dist/tex/context/base/x-mathml.mkiv | 77 +- Master/texmf-dist/tex/context/base/x-pending.mkiv | 39 + Master/texmf-dist/tex/context/base/x-set-11.mkii | 556 +- Master/texmf-dist/tex/context/base/x-set-11.mkiv | 566 +- Master/texmf-dist/tex/context/base/x-set-11.tex | 890 +- Master/texmf-dist/tex/context/base/x-set-12.tex | 17 +- Master/texmf-dist/tex/context/base/x-set-99.tex | 283 + Master/texmf-dist/tex/context/base/x-xtag.mkiv | 23 + Master/texmf-dist/tex/context/bib/bibl-apa-de.tex | 4 +- Master/texmf-dist/tex/context/bib/t-bib.mkii | 5 - Master/texmf-dist/tex/context/bib/t-bib.mkiv | 62 - Master/texmf-dist/tex/context/bib/t-bib.tex | 1929 - Master/texmf-dist/tex/context/bib/t-bibltx.tex | 75 - Master/texmf-dist/tex/context/config/cont-cz.ini | 5 - 
Master/texmf-dist/tex/context/config/cont-de.ini | 5 - Master/texmf-dist/tex/context/config/cont-en.ini | 5 - Master/texmf-dist/tex/context/config/cont-fmt.tex | 29 - Master/texmf-dist/tex/context/config/cont-fr.ini | 5 - Master/texmf-dist/tex/context/config/cont-it.ini | 5 - Master/texmf-dist/tex/context/config/cont-nl.ini | 5 - Master/texmf-dist/tex/context/config/cont-ro.ini | 5 - Master/texmf-dist/tex/context/config/cont-uk.ini | 5 - .../texmf-dist/tex/context/fonts/antykwa-math.lfg | 57 + .../texmf-dist/tex/context/fonts/charter-math.lfg | 28 + .../texmf-dist/tex/context/fonts/garamond-math.lfg | 28 + Master/texmf-dist/tex/context/fonts/husayni.lfg | 178 + .../texmf-dist/tex/context/fonts/hvmath-math.lfg | 24 + .../texmf-dist/tex/context/fonts/informal-math.lfg | 22 + Master/texmf-dist/tex/context/fonts/iwona-math.lfg | 57 + Master/texmf-dist/tex/context/fonts/lm-math.lfg | 230 + .../texmf-dist/tex/context/fonts/lucida-math.lfg | 320 + .../tex/context/fonts/mathtimes-math.lfg | 24 + Master/texmf-dist/tex/context/fonts/px-math.lfg | 24 + .../texmf-dist/tex/context/fonts/symbol-math.lfg | 17 + Master/texmf-dist/tex/context/fonts/tx-math.lfg | 24 + .../texmf-dist/tex/context/fonts/utopia-math.lfg | 28 + .../texmf-dist/tex/context/interface/cont-cs.xml | 71 +- .../texmf-dist/tex/context/interface/cont-de.xml | 71 +- .../texmf-dist/tex/context/interface/cont-en.xml | 59 +- .../texmf-dist/tex/context/interface/cont-fr.xml | 67 +- .../texmf-dist/tex/context/interface/cont-it.xml | 71 +- .../texmf-dist/tex/context/interface/cont-nl.xml | 71 +- .../texmf-dist/tex/context/interface/cont-pe.xml | 71 +- .../texmf-dist/tex/context/interface/cont-ro.xml | 71 +- .../texmf-dist/tex/context/interface/keys-cs.xml | 69 +- .../texmf-dist/tex/context/interface/keys-de.xml | 69 +- .../texmf-dist/tex/context/interface/keys-en.xml | 73 +- .../texmf-dist/tex/context/interface/keys-fr.xml | 69 +- .../texmf-dist/tex/context/interface/keys-it.xml | 69 +- .../texmf-dist/tex/context/interface/keys-nl.xml | 73 +- .../texmf-dist/tex/context/interface/keys-pe.xml | 69 +- .../texmf-dist/tex/context/interface/keys-ro.xml | 69 +- Master/texmf-dist/tex/context/interface/t-bib.xml | 411 - Master/texmf-dist/tex/context/patterns/lang-ba.hyp | 36 - Master/texmf-dist/tex/context/patterns/lang-ba.pat | 224 - Master/texmf-dist/tex/context/patterns/lang-bg.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-bg.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-ca.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-ca.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-ca.rme | 2 +- Master/texmf-dist/tex/context/patterns/lang-cs.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-cs.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-cy.hyp | 33 +- Master/texmf-dist/tex/context/patterns/lang-cy.pat | 35 +- Master/texmf-dist/tex/context/patterns/lang-cy.rme | 64 + Master/texmf-dist/tex/context/patterns/lang-da.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-da.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-de.hyp | 6 +- Master/texmf-dist/tex/context/patterns/lang-de.pat | 14392 ++-- Master/texmf-dist/tex/context/patterns/lang-de.rme | 16 +- .../texmf-dist/tex/context/patterns/lang-deo.hyp | 6 +- .../texmf-dist/tex/context/patterns/lang-deo.pat | 14238 ++-- .../texmf-dist/tex/context/patterns/lang-deo.rme | 16 +- Master/texmf-dist/tex/context/patterns/lang-es.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-es.pat | 1238 +- Master/texmf-dist/tex/context/patterns/lang-es.rme | 96 +- 
Master/texmf-dist/tex/context/patterns/lang-et.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-et.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-eu.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-eu.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-fi.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-fi.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-fr.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-fr.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-gb.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-gb.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-hr.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-hr.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-hu.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-hu.pat | 68283 ++++++++++++++++--- Master/texmf-dist/tex/context/patterns/lang-hu.rme | 69 +- Master/texmf-dist/tex/context/patterns/lang-il.hyp | 8 - Master/texmf-dist/tex/context/patterns/lang-il.pat | 1905 - Master/texmf-dist/tex/context/patterns/lang-il.rme | 70 - Master/texmf-dist/tex/context/patterns/lang-is.hyp | 32 +- Master/texmf-dist/tex/context/patterns/lang-is.pat | 4219 +- Master/texmf-dist/tex/context/patterns/lang-is.rme | 79 + Master/texmf-dist/tex/context/patterns/lang-it.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-it.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-la.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-la.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-lt.hyp | 8 + Master/texmf-dist/tex/context/patterns/lang-lt.pat | 1553 + Master/texmf-dist/tex/context/patterns/lang-lt.rme | 47 + Master/texmf-dist/tex/context/patterns/lang-mn.hyp | 8 + Master/texmf-dist/tex/context/patterns/lang-mn.pat | 995 + Master/texmf-dist/tex/context/patterns/lang-mn.rme | 86 + Master/texmf-dist/tex/context/patterns/lang-nb.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-nb.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-nl.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-nl.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-nn.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-nn.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-no.hyp | 118 - Master/texmf-dist/tex/context/patterns/lang-no.pat | 26818 -------- Master/texmf-dist/tex/context/patterns/lang-pl.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-pl.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-pt.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-pt.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-ro.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-ro.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-ru.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-ru.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-sk.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-sk.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-sl.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-sl.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-sr.hyp | 137 + Master/texmf-dist/tex/context/patterns/lang-sr.pat | 2432 + Master/texmf-dist/tex/context/patterns/lang-sr.rme | 80 + Master/texmf-dist/tex/context/patterns/lang-sv.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-sv.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-tk.hyp | 8 + Master/texmf-dist/tex/context/patterns/lang-tk.pat | 2378 + Master/texmf-dist/tex/context/patterns/lang-tk.rme | 18 + Master/texmf-dist/tex/context/patterns/lang-tr.hyp | 2 +- 
Master/texmf-dist/tex/context/patterns/lang-tr.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-us.hyp | 2 +- Master/texmf-dist/tex/context/patterns/lang-us.pat | 2 +- Master/texmf-dist/tex/context/patterns/lang-zh.hyp | 8 + Master/texmf-dist/tex/context/patterns/lang-zh.pat | 195 + Master/texmf-dist/tex/context/patterns/lang-zh.rme | 88 + Master/texmf-dist/tex/context/sample/demo-cld.cld | 74 - Master/texmf-dist/tex/context/sample/sample.tex | 68 +- .../texmf-dist/tex/context/test/chem-str-test.tex | 560 - .../texmf-dist/tex/context/test/sort-lan-test.tex | 38 - Master/texmf-dist/tex/context/test/x-cals-test.tex | 48 - Master/texmf-dist/tex/context/test/x-cals-test.xml | 522 - Master/texmf-dist/tex/context/test/x-cml-test.tex | 9 - Master/texmf-dist/tex/context/test/x-cml-test.xml | 210 - Master/texmf-dist/tex/context/test/x-ct-test.tex | 7 - Master/texmf-dist/tex/context/test/x-ct-test.xml | 149 - Master/texmf-dist/tex/context/test/x-mmp-test.tex | 12 - Master/texmf-dist/tex/context/test/x-mmp-test.xml | 1270 - .../tex/generic/context/luatex-fonts-merged.lua | 4321 +- .../tex/generic/context/luatex-fonts.lua | 2 +- .../tex/generic/context/luatex-mplib.lua | 6 +- .../texmf-dist/tex/generic/context/luatex-test.tex | 6 + Master/texmf-dist/tex/generic/context/m-ch-en.tex | 4 +- 960 files changed, 190374 insertions(+), 128283 deletions(-) create mode 100644 Master/texmf-dist/context/data/scite/scite-ctx.lua create mode 100644 Master/texmf-dist/context/data/textadept/context.lua create mode 100644 Master/texmf-dist/context/data/texworks/TUG/TeXworks.ini delete mode 100644 Master/texmf-dist/context/data/texworks/TUG/texworks.ini create mode 100644 Master/texmf-dist/context/data/texworks/configuration/tools.ini create mode 100644 Master/texmf-dist/fonts/enc/dvips/context/ec-2004.enc create mode 100644 Master/texmf-dist/fonts/map/pdftex/context/mkiv-base.map create mode 100644 Master/texmf-dist/fonts/map/pdftex/context/mkiv-px.map create mode 100644 Master/texmf-dist/fonts/map/pdftex/context/mkiv-tx.map create mode 100644 Master/texmf-dist/scripts/context/lua/mtx-modules.lua create mode 100644 Master/texmf-dist/scripts/context/lua/mtx-mtxworks.lua create mode 100644 Master/texmf-dist/scripts/context/lua/mtx-scite.lua delete mode 100644 Master/texmf-dist/scripts/context/lua/scite-ctx.lua delete mode 100644 Master/texmf-dist/scripts/context/perl/texshow.pl delete mode 100644 Master/texmf-dist/scripts/context/ruby/newimgtopdf.rb delete mode 100644 Master/texmf-dist/scripts/context/ruby/newpstopdf.rb delete mode 100644 Master/texmf-dist/scripts/context/ruby/newtexexec.rb delete mode 100644 Master/texmf-dist/scripts/context/ruby/newtexutil.rb delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/context.cmd create mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/context.exe delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/ctxtools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/luatools.cmd create mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/luatools.exe delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/makempy.bat delete mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/metatex.cmd create mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/metatex.exe delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/mpstools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.cmd create mode 100644 
Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll create mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/mtxtools.bat delete mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/mtxworks.cmd delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/pdftools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/rlxtools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/runtools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/texexec.bat delete mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/texexec.cmd create mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/texexec.exe delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/texfont.bat delete mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.cmd create mode 100644 Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.exe delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/textools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/texutil.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/tmftools.bat delete mode 100755 Master/texmf-dist/scripts/context/stubs/mswin/xmltools.bat create mode 100644 Master/texmf-dist/scripts/context/stubs/source/mtxrun_dll.c create mode 100644 Master/texmf-dist/scripts/context/stubs/source/mtxrun_exe.c create mode 100644 Master/texmf-dist/scripts/context/stubs/source/readme.txt delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/ctxtools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/makempy delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/mpstools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/mptopdf delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/mtxtools delete mode 100644 Master/texmf-dist/scripts/context/stubs/unix/mtxworks delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/pdftools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/pstopdf delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/rlxtools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/runtools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/texfont delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/textools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/texutil delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/tmftools delete mode 100755 Master/texmf-dist/scripts/context/stubs/unix/xmltools create mode 100644 Master/texmf-dist/tex/context/base/back-u3d.mkiv create mode 100644 Master/texmf-dist/tex/context/base/bibl-tra.lua create mode 100644 Master/texmf-dist/tex/context/base/bibl-tra.mkii create mode 100644 Master/texmf-dist/tex/context/base/bibl-tra.mkiv create mode 100644 Master/texmf-dist/tex/context/base/blob-ini.lua create mode 100644 Master/texmf-dist/tex/context/base/blob-ini.mkiv create mode 100644 Master/texmf-dist/tex/context/base/bxml-apa.mkiv create mode 100644 Master/texmf-dist/tex/context/base/char-tex.lua delete mode 100644 Master/texmf-dist/tex/context/base/colo-new.mkii delete mode 100644 Master/texmf-dist/tex/context/base/colo-new.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/colo-new.tex create mode 100644 Master/texmf-dist/tex/context/base/colo-x11.tex delete mode 100644 
Master/texmf-dist/tex/context/base/cont-cz.tex delete mode 100644 Master/texmf-dist/tex/context/base/cont-uk.tex delete mode 100644 Master/texmf-dist/tex/context/base/core-bar.tex delete mode 100644 Master/texmf-dist/tex/context/base/core-blk.tex delete mode 100644 Master/texmf-dist/tex/context/base/core-box.tex delete mode 100644 Master/texmf-dist/tex/context/base/core-grd.mkii delete mode 100644 Master/texmf-dist/tex/context/base/core-grd.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/core-spa.lua delete mode 100644 Master/texmf-dist/tex/context/base/core-spa.mkii delete mode 100644 Master/texmf-dist/tex/context/base/core-spa.mkiv create mode 100644 Master/texmf-dist/tex/context/base/core-sys.lua create mode 100644 Master/texmf-dist/tex/context/base/data-sch.lua create mode 100644 Master/texmf-dist/tex/context/base/enco-l7x.mkii create mode 100644 Master/texmf-dist/tex/context/base/font-agl.lua create mode 100644 Master/texmf-dist/tex/context/base/font-enh.lua create mode 100644 Master/texmf-dist/tex/context/base/font-gds.lua create mode 100644 Master/texmf-dist/tex/context/base/font-gds.mkiv create mode 100644 Master/texmf-dist/tex/context/base/grph-fil.lua create mode 100644 Master/texmf-dist/tex/context/base/grph-swf.lua create mode 100644 Master/texmf-dist/tex/context/base/l-pdfview.lua create mode 100644 Master/texmf-dist/tex/context/base/lang-wrd.lua create mode 100644 Master/texmf-dist/tex/context/base/lang-wrd.mkiv create mode 100644 Master/texmf-dist/tex/context/base/lpdf-pdx.lua create mode 100644 Master/texmf-dist/tex/context/base/lpdf-pdx.mkiv create mode 100644 Master/texmf-dist/tex/context/base/lpdf-swf.lua create mode 100644 Master/texmf-dist/tex/context/base/lpdf-xmp.lua create mode 100644 Master/texmf-dist/tex/context/base/lpdf-xmp.xml create mode 100644 Master/texmf-dist/tex/context/base/lxml-aux.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-ctx.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-ctx.mkiv create mode 100644 Master/texmf-dist/tex/context/base/lxml-dir.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-inf.lua delete mode 100644 Master/texmf-dist/tex/context/base/lxml-ini.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-lpt.lua delete mode 100644 Master/texmf-dist/tex/context/base/lxml-pth.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-sor.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-sor.mkiv create mode 100644 Master/texmf-dist/tex/context/base/lxml-tex.lua create mode 100644 Master/texmf-dist/tex/context/base/lxml-xml.lua create mode 100644 Master/texmf-dist/tex/context/base/m-barcodes.mkiv create mode 100644 Master/texmf-dist/tex/context/base/m-chart.mkii create mode 100644 Master/texmf-dist/tex/context/base/m-chart.mkiv create mode 100644 Master/texmf-dist/tex/context/base/m-directives.tex create mode 100644 Master/texmf-dist/tex/context/base/m-obsolete.tex delete mode 100644 Master/texmf-dist/tex/context/base/m-pstric.tex create mode 100644 Master/texmf-dist/tex/context/base/m-pstricks.lua create mode 100644 Master/texmf-dist/tex/context/base/m-pstricks.mkii create mode 100644 Master/texmf-dist/tex/context/base/m-pstricks.mkiv create mode 100644 Master/texmf-dist/tex/context/base/m-pstricks.tex create mode 100644 Master/texmf-dist/tex/context/base/m-punk.mkiv create mode 100644 Master/texmf-dist/tex/context/base/m-steps.mkii create mode 100644 Master/texmf-dist/tex/context/base/m-steps.mkiv delete mode 100644 
Master/texmf-dist/tex/context/base/m-track.tex create mode 100644 Master/texmf-dist/tex/context/base/m-trackers.tex create mode 100644 Master/texmf-dist/tex/context/base/math-lan.mkiv create mode 100644 Master/texmf-dist/tex/context/base/mtx-context-common.tex create mode 100644 Master/texmf-dist/tex/context/base/mult-clm.lua create mode 100644 Master/texmf-dist/tex/context/base/node-aux.lua create mode 100644 Master/texmf-dist/tex/context/base/node-dir.lua create mode 100644 Master/texmf-dist/tex/context/base/node-mig.lua create mode 100644 Master/texmf-dist/tex/context/base/node-mig.mkiv create mode 100644 Master/texmf-dist/tex/context/base/node-pag.lua create mode 100644 Master/texmf-dist/tex/context/base/node-pag.mkiv create mode 100644 Master/texmf-dist/tex/context/base/node-rul.lua create mode 100644 Master/texmf-dist/tex/context/base/node-rul.mkiv create mode 100644 Master/texmf-dist/tex/context/base/node-typ.lua create mode 100644 Master/texmf-dist/tex/context/base/page-flt.lua create mode 100644 Master/texmf-dist/tex/context/base/page-flt.mkiv create mode 100644 Master/texmf-dist/tex/context/base/page-str.lua create mode 100644 Master/texmf-dist/tex/context/base/pret-xml.lua create mode 100644 Master/texmf-dist/tex/context/base/s-fnt-26.tex create mode 100644 Master/texmf-dist/tex/context/base/s-inf-01.tex create mode 100644 Master/texmf-dist/tex/context/base/s-inf-02.tex create mode 100644 Master/texmf-dist/tex/context/base/s-mod.ctx create mode 100644 Master/texmf-dist/tex/context/base/spac-ali.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-def.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-fnt.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-gen.mkii create mode 100644 Master/texmf-dist/tex/context/base/spac-grd.mkii create mode 100644 Master/texmf-dist/tex/context/base/spac-grd.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-hor.lua create mode 100644 Master/texmf-dist/tex/context/base/spac-hor.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-pag.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-par.mkiv create mode 100644 Master/texmf-dist/tex/context/base/spac-ver.lua create mode 100644 Master/texmf-dist/tex/context/base/spac-ver.mkiv create mode 100644 Master/texmf-dist/tex/context/base/supp-mat.mkii create mode 100644 Master/texmf-dist/tex/context/base/supp-mat.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/supp-mat.tex create mode 100644 Master/texmf-dist/tex/context/base/syst-ltx.tex create mode 100644 Master/texmf-dist/tex/context/base/tabl-com.mkii create mode 100644 Master/texmf-dist/tex/context/base/tabl-com.mkiv create mode 100644 Master/texmf-dist/tex/context/base/type-buy.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-buy.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-buy.tex create mode 100644 Master/texmf-dist/tex/context/base/type-cbg.mkii delete mode 100644 Master/texmf-dist/tex/context/base/type-cbg.tex create mode 100644 Master/texmf-dist/tex/context/base/type-cow.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-cow.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-cow.tex create mode 100644 Master/texmf-dist/tex/context/base/type-def.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-def.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-def.tex create mode 100644 Master/texmf-dist/tex/context/base/type-exp.mkii delete mode 100644 
Master/texmf-dist/tex/context/base/type-exp.tex create mode 100644 Master/texmf-dist/tex/context/base/type-fsf.mkii delete mode 100644 Master/texmf-dist/tex/context/base/type-fsf.tex create mode 100644 Master/texmf-dist/tex/context/base/type-ghz.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-ghz.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-ghz.tex create mode 100644 Master/texmf-dist/tex/context/base/type-hgz.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-hgz.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-hgz.tex create mode 100644 Master/texmf-dist/tex/context/base/type-husayni-default.mkiv create mode 100644 Master/texmf-dist/tex/context/base/type-lua.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-mac.tex create mode 100644 Master/texmf-dist/tex/context/base/type-msw.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-msw.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-msw.tex delete mode 100644 Master/texmf-dist/tex/context/base/type-one.tex delete mode 100644 Master/texmf-dist/tex/context/base/type-otf.tex create mode 100644 Master/texmf-dist/tex/context/base/type-pre.mkii delete mode 100644 Master/texmf-dist/tex/context/base/type-pre.tex create mode 100644 Master/texmf-dist/tex/context/base/type-set.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-set.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-siz.tex delete mode 100644 Master/texmf-dist/tex/context/base/type-tmf.tex create mode 100644 Master/texmf-dist/tex/context/base/type-win.mkii create mode 100644 Master/texmf-dist/tex/context/base/type-win.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/type-win.tex create mode 100644 Master/texmf-dist/tex/context/base/type-xtx.mkii delete mode 100644 Master/texmf-dist/tex/context/base/type-xtx.tex create mode 100644 Master/texmf-dist/tex/context/base/typo-dig.lua create mode 100644 Master/texmf-dist/tex/context/base/typo-dig.mkiv create mode 100644 Master/texmf-dist/tex/context/base/typo-rep.lua create mode 100644 Master/texmf-dist/tex/context/base/typo-rep.mkiv create mode 100644 Master/texmf-dist/tex/context/base/x-asciimath.lua create mode 100644 Master/texmf-dist/tex/context/base/x-asciimath.mkiv create mode 100644 Master/texmf-dist/tex/context/base/x-cals.lua create mode 100644 Master/texmf-dist/tex/context/base/x-contml.mkii delete mode 100644 Master/texmf-dist/tex/context/base/x-contml.tex create mode 100644 Master/texmf-dist/tex/context/base/x-corres.mkii delete mode 100644 Master/texmf-dist/tex/context/base/x-corres.tex create mode 100644 Master/texmf-dist/tex/context/base/x-ct.lua create mode 100644 Master/texmf-dist/tex/context/base/x-dir-05.mkii create mode 100644 Master/texmf-dist/tex/context/base/x-dir-05.mkiv create mode 100644 Master/texmf-dist/tex/context/base/x-fe.mkii delete mode 100644 Master/texmf-dist/tex/context/base/x-fe.tex create mode 100644 Master/texmf-dist/tex/context/base/x-fo.mkii delete mode 100644 Master/texmf-dist/tex/context/base/x-fo.tex create mode 100644 Master/texmf-dist/tex/context/base/x-foxet.mkii create mode 100644 Master/texmf-dist/tex/context/base/x-foxet.mkiv delete mode 100644 Master/texmf-dist/tex/context/base/x-foxet.tex create mode 100644 Master/texmf-dist/tex/context/base/x-pending.mkiv create mode 100644 Master/texmf-dist/tex/context/base/x-set-99.tex create mode 100644 Master/texmf-dist/tex/context/base/x-xtag.mkiv delete mode 100644 Master/texmf-dist/tex/context/bib/t-bib.mkii 
delete mode 100644 Master/texmf-dist/tex/context/bib/t-bib.mkiv delete mode 100644 Master/texmf-dist/tex/context/bib/t-bib.tex delete mode 100644 Master/texmf-dist/tex/context/bib/t-bibltx.tex delete mode 100644 Master/texmf-dist/tex/context/config/cont-cz.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-de.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-en.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-fmt.tex delete mode 100644 Master/texmf-dist/tex/context/config/cont-fr.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-it.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-nl.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-ro.ini delete mode 100644 Master/texmf-dist/tex/context/config/cont-uk.ini create mode 100644 Master/texmf-dist/tex/context/fonts/antykwa-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/charter-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/garamond-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/husayni.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/hvmath-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/informal-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/iwona-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/lm-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/lucida-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/mathtimes-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/px-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/symbol-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/tx-math.lfg create mode 100644 Master/texmf-dist/tex/context/fonts/utopia-math.lfg delete mode 100644 Master/texmf-dist/tex/context/interface/t-bib.xml delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-ba.hyp delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-ba.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-cy.rme delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-il.hyp delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-il.pat delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-il.rme create mode 100644 Master/texmf-dist/tex/context/patterns/lang-is.rme create mode 100644 Master/texmf-dist/tex/context/patterns/lang-lt.hyp create mode 100644 Master/texmf-dist/tex/context/patterns/lang-lt.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-lt.rme create mode 100644 Master/texmf-dist/tex/context/patterns/lang-mn.hyp create mode 100644 Master/texmf-dist/tex/context/patterns/lang-mn.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-mn.rme delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-no.hyp delete mode 100644 Master/texmf-dist/tex/context/patterns/lang-no.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-sr.hyp create mode 100644 Master/texmf-dist/tex/context/patterns/lang-sr.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-sr.rme create mode 100644 Master/texmf-dist/tex/context/patterns/lang-tk.hyp create mode 100644 Master/texmf-dist/tex/context/patterns/lang-tk.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-tk.rme create mode 100644 Master/texmf-dist/tex/context/patterns/lang-zh.hyp create mode 100644 Master/texmf-dist/tex/context/patterns/lang-zh.pat create mode 100644 Master/texmf-dist/tex/context/patterns/lang-zh.rme delete mode 
100644 Master/texmf-dist/tex/context/sample/demo-cld.cld delete mode 100644 Master/texmf-dist/tex/context/test/chem-str-test.tex delete mode 100644 Master/texmf-dist/tex/context/test/sort-lan-test.tex delete mode 100644 Master/texmf-dist/tex/context/test/x-cals-test.tex delete mode 100644 Master/texmf-dist/tex/context/test/x-cals-test.xml delete mode 100644 Master/texmf-dist/tex/context/test/x-cml-test.tex delete mode 100644 Master/texmf-dist/tex/context/test/x-cml-test.xml delete mode 100644 Master/texmf-dist/tex/context/test/x-ct-test.tex delete mode 100644 Master/texmf-dist/tex/context/test/x-ct-test.xml delete mode 100644 Master/texmf-dist/tex/context/test/x-mmp-test.tex delete mode 100644 Master/texmf-dist/tex/context/test/x-mmp-test.xml (limited to 'Master/texmf-dist') diff --git a/Master/texmf-dist/context/data/scite/cont-cs-scite.properties b/Master/texmf-dist/context/data/scite/cont-cs-scite.properties index fbc653e57cc..ddf42aacdd6 100644 --- a/Master/texmf-dist/context/data/scite/cont-cs-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-cs-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.cs=\ -CAP Cap Caps MESIC Rimskecislice SLOVA \ -SLOVO Slova Slovo VSEDNIDEN Znak Znaky \ -aktualnicislonadpisu aktualnidatum appendix barva bilemisto bublinkovanapoveda \ -bypassblocks cap cernalinka cernelinky chapter chem \ -cisla cislonadpisu citace citovat completecombinedlist completelistoffloats \ -completelistofsorts completelistofsynonyms completeregister coupledregister crlf datum \ -definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling definetype \ -definetypeface definuj definujbarvu definujblok definujbloksekce definujbuffer \ -definujfont definujformatodkazu definujinterakcnimenu definujinterakcnimenu definujkombinovanyseznam definujkonverzi \ -definujlogo definujnadpis definujobrazeksymbol definujodkaz definujodsazovani definujodstavce \ -definujopis definujoramovani definujoramovanytext definujpaletu definujplvouciobjekt definujpodpole \ -definujpole definujpopis definujpopisek definujprekryv definujpreskok definujprofil \ -definujprogram definujprostredizakladnihofontu definujrejstrik definujsablonutabulky definujsekci definujseznam \ -definujseznamodkazu definujskupinubarev definujstartstop definujsymbol definujsynonumumfontu definujsynonyma \ -definujtabelaci definujtext definujtrideni definujupravu definujvelikostpapiru definujverzi \ -definujvycet definujvystup definujzakladnifont definujzasobnikpoli definujznaceni description \ -dodrzujprofil dodrzujverzi dodrzujverziprofilu dvoustrannypapir emptylines enumeration \ -externiobraz footnotetext forceblocks framedtext hl hlavnijazyk \ -indentation ininner inouter instalacejazyka interakcnilista interakcnitlacitka \ -jazyk jdidolu jdina jdinabox klonujpole komentar \ -konvertujcislo kopirujpole korekcebilehomista labeling listsymbol loadsorts \ -loadsynonyms mapfontsize marginalnilinka marginalnitext matematika mediaeval \ -meritko mesic mezera mrizka nadpis nadruhyokraj \ -nadtrzeni nadtrzeno name naokraj nastavbarvu nastavbarvy \ -nastavbilamista nastavblok nastavbloksekce nastavbuffer nastavcernelinky nastavcislonadpisu \ -nastavcislostrany nastavcislovani nastavcislovaniodstavcu nastavcislovaniradku nastavcislovanistran nastavcitaci \ -nastavdefinicipoznamekpodcarou nastavdeleniplvoucichobjektu nastavdelitko nastavdolnitexty nastavexterniobrazy nastavhorejsek \ -nastavhornitexty nastavinterakci nastavinterakcnilistu nastavinterakcnimenu nastavinterakcniobrazovku 
nastavjazyk \ -nastavkapitalky nastavkombinovanyseznam nastavkomentar nastavlegendu nastavmarginalie nastavmarginalniblok \ -nastavmarginalnilinky nastavmeziradkovoumezeru nastavnadpis nastavnadpisy nastavodkazovani nastavodsazeni \ -nastavodsazovani nastavodstavce nastavopis nastavoramovanetexty nastavoramovani nastavorez \ -nastavotoceni nastavpaletu nastavplvouciobjekt nastavplvouciobjekty nastavpodcislostrany nastavpodtrzeni \ -nastavpole nastavpolozky nastavpopisek nastavpopisky nastavpopisy nastavpozadi \ -nastavpozadi nastavpoznamkypodcarou nastavprechodstrany nastavpreskok nastavprofily nastavprogramy \ -nastavprostredizakladnihofontu nastavpublikace nastavradkovani nastavradky nastavrastr nastavrejstrik \ -nastavrovnice nastavsadusymbolu nastavsekci nastavseznam nastavseznamodkazu nastavsirkucary \ -nastavsloupce nastavspodek nastavspojeni nastavsynchronizaci nastavsynchronizacnilistu nastavsynonyma \ -nastavsystem nastavtab nastavtabelaci nastavtabulky nastavtenkelinky nastavtext \ -nastavtexthlavicky nastavtextovelinky nastavtextpopisku nastavtexttexty nastavtextyupati nastavtextyzahlavi \ -nastavtlacitka nastavtoleranci nastavtrideni nastavtype nastavumisteniprotejsku nastavumistovani \ -nastavupati nastavupravu nastavurl nastavusporadani nastavvelikostpapiru nastavverze \ -nastavvsechnapole nastavvycty nastavvyplnovelinky nastavvyplnoveradky nastavvystup nastavvzhled \ -nastavzahlavi nastavzakladnifont nastavzarovnani nastavznaceni nastavzuzeni nastrane \ -nejakyradek nekde nextsection nivy nizky nocap \ -nop obrazovka odkaz odkaz odkaznadatum odkaznastranu \ -odkaznatext odsazovani okr opis opissoubor oramovani \ -oref orez otocit oznacverzi paragraph parovastrana \ -part pis placefloat placelistoffloats placelistofsorts placelistofsynonyms \ -placereferencelist podtrzeni podtrzeno pol pole polozka \ -polozky popisky porovnejpaletu porovnejskupinubarev pozadi pozice \ -poznamka poznamkapodcarou pref prelozit premistinamrizku prepninazakladnifont \ -preskoc preskrtnuti preskrtnuto prizpusobivepole prizpusobvzhled program \ -propojeneznaceni propojenydokument propojenyrejstrik publikace ran ref \ -register reservefloat reset resettextcontent resetznaceni rimskecislice \ -rozdelplvouciobjekt rozpojeneznaceni roztazene schovejbloky section sedabarva \ -seeregister settextcontent setupfonthandling setupfontsynonym setupforms setupinterlinespace2 \ -setupitemgroup setuplistalternative setuppaper setupstrut sloupec slovovpravo \ -sort spodek stanovcharakteristickuseznamu stanovcislonadpisu startalignment startbarva \ -startbuffer startbuffer startcitace startcolumns startcombination startdescription \ -startdocument startenumeration startfakt startfigure startfloattext startformula \ -startframedtext starthiding startinteraktivnimenu startitemgroup startkomentar startkomponenta \ -startlegend startline startlinecorrection startlinenumbering startlines startlocal \ -startlocalenvironment startlocalfootnotes startmakeup startmarginalnilinka startmarginblock startnamemakeup \ -startnarrower startopposite startoverlay startoverview startparagraph startpositioning \ -startpostponing startpozadi startprodukt startprofile startprojekt startprostredi \ -startregister startsymbolset startsynchronization starttable starttables starttabulate \ -starttextovalinka starttyping startunpacked startverze startzhustene stopalignment \ -stopbarva stopbuffer stopbuffer stopcitace stopcolumns stopcombination \ -stopdescription stopdocument stopenumeration stopfakt stopfigure stopfloattext \ -stopformula 
stopframedtext stophiding stopinteraktivnimenu stopitemgroup stopkomentar \ -stopkomponenta stoplegend stopline stoplinecorrection stoplinenumbering stoplines \ -stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginalnilinka stopmarginblock \ -stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph \ -stoppositioning stoppostponing stoppozadi stopprodukt stopprofile stopprojekt \ -stopprostredi stopsymbolset stopsynchronization stoptable stoptables stoptabulate \ -stoptextovalinka stoptyping stopunpacked stopverze stopzhustene strana \ -sub subject subsection subsubject subsubsection subsubsubject \ -sym symbol synchronizacnilista synchronizovat synonym tab \ -tecky tenkalinka tenkelinky tex texthlavicky textovalinka \ -textpopisku title tlacitko tref tvrdemezery typebuffer \ -typstrany ukazbarvu ukazexterniobrazy ukazmrizku ukaznastaveni ukazpaletu \ -ukazpodpery ukazpole ukazpostredizakladnihofontu ukazramecek ukazsadusymbolu ukazskupinubarev \ -ukazupravu ukazvytisk ukazvzhled ukazzakladnifont umistikombinovanyseznam umistikombinovanyseznam \ -umistilegendu umistiloga umistilokalnipoznamkypodcarou umistinadsebe umistinamrizku umistipodrovnici \ -umistipoznamkypodcarou umistirejstrik umistirejstrik umistirovnici umistiseznam umistivedlesebe \ -umistizalozky usedirectory usetypescript usetypescriptfile uzijURL uzijbloky \ -uzijexternidokument uzijexterniobraz uzijexternisoubor uzijexternisoubory uzijexternizvuk uzijkodovani \ -uzijmodul uzijodkazy uzijprikazy uzijspeciality uzijsymbol verze \ -vl vlasovalinka vlevo vpravo vradku vsedniden \ -vyberbloky vyberpapir vyberverzi vyplnenytext vyplnovelinky vyplnovepole \ -vyplnovyradek vysoky zablokujinterakcnimenu zachovejbloky zadnamezera zadnebilemisto \ -zadnedalsibloky zadnedalsisoubory zadnehorniadolniradky zadneodsazovani zadnezahlaviaupati zadneznaceni \ -zadnyseznam zalozka zapisdorejstriku zapisdoseznamu zapisdoseznamuodkazu zapismeziseznam \ -zaramovani zarovnanonastred zarovnanovlevo zarovnanovpravo zasobnikpoli zaznamovepole \ -zhustene ziskejbuffer ziskejznaceni zlomek znaceni znak \ -znaky zpracujbloky zpracujstranu zrcadlit zref \ No newline at end of file +keywordclass.macros.context.cs=CAP Cap Caps MESIC Rimskecislice SLOVA SLOVO Slova Slovo VSEDNIDEN Znak Znaky aktualnicislonadpisu aktualnidatum appendix barva bilemisto bublinkovanapoveda bypassblocks cap cernalinka cernelinky chapter chem cisla cislonadpisu citace citovat completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister coupledregister crlf datum definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling definetype definetypeface definuj definujbarvu definujblok definujbloksekce definujbuffer definujfont definujformatodkazu definujinterakcnimenu definujinterakcnimenu definujkombinovanyseznam definujkonverzi definujlogo definujnadpis definujobrazeksymbol definujodkaz definujodsazovani definujodstavce definujopis definujoramovani definujoramovanytext definujpaletu definujplvouciobjekt definujpodpole definujpole definujpopis definujpopisek definujprekryv definujpreskok definujprofil definujprogram definujprostredizakladnihofontu definujrejstrik definujsablonutabulky definujsekci definujseznam definujseznamodkazu definujskupinubarev definujstartstop definujsymbol definujsynonumumfontu definujsynonyma definujtabelaci definujtext definujtrideni definujupravu definujvelikostpapiru definujverzi definujvycet definujvystup definujzakladnifont definujzasobnikpoli 
definujznaceni description dodrzujprofil dodrzujverzi dodrzujverziprofilu dvoustrannypapir emptylines enumeration externiobraz footnotetext forceblocks framedtext hl hlavnijazyk indentation ininner inouter instalacejazyka interakcnilista interakcnitlacitka jazyk jdidolu jdina jdinabox klonujpole komentar konvertujcislo kopirujpole korekcebilehomista labeling listsymbol loadsorts loadsynonyms mapfontsize marginalnilinka marginalnitext matematika mediaeval meritko mesic mezera mrizka nadpis nadruhyokraj name naokraj nastavbarvu nastavbarvy nastavbilamista nastavblok nastavbloksekce nastavbuffer nastavcernelinky nastavcislonadpisu nastavcislostrany nastavcislovani nastavcislovaniodstavcu nastavcislovaniradku nastavcislovanistran nastavcitaci nastavdefinicipoznamekpodcarou nastavdeleniplvoucichobjektu nastavdelitko nastavdolnitexty nastavexterniobrazy nastavhorejsek nastavhornitexty nastavinterakci nastavinterakcnilistu nastavinterakcnimenu nastavinterakcniobrazovku nastavjazyk nastavkapitalky nastavkombinovanyseznam nastavkomentar nastavlegendu nastavmarginalie nastavmarginalniblok nastavmarginalnilinky nastavmeziradkovoumezeru nastavnadpis nastavnadpisy nastavodkazovani nastavodsazeni nastavodsazovani nastavodstavce nastavopis nastavoramovanetexty nastavoramovani nastavorez nastavotoceni nastavpaletu nastavplvouciobjekt nastavplvouciobjekty nastavpodcislostrany nastavpodtrzeni nastavpole nastavpolozky nastavpopisek nastavpopisky nastavpopisy nastavpozadi nastavpozadi nastavpoznamkypodcarou nastavprechodstrany nastavpreskok nastavprofily nastavprogramy nastavprostredizakladnihofontu nastavpublikace nastavradkovani nastavradky nastavrastr nastavrejstrik nastavrovnice nastavsadusymbolu nastavsekci nastavseznam nastavseznamodkazu nastavsirkucary nastavsloupce nastavspodek nastavspojeni nastavsynchronizaci nastavsynchronizacnilistu nastavsynonyma nastavsystem nastavtab nastavtabelaci nastavtabulky nastavtenkelinky nastavtext nastavtexthlavicky nastavtextovelinky nastavtextpopisku nastavtexttexty nastavtextyupati nastavtextyzahlavi nastavtlacitka nastavtoleranci nastavtrideni nastavtype nastavumisteniprotejsku nastavumistovani nastavupati nastavupravu nastavurl nastavusporadani nastavvelikostpapiru nastavverze nastavvsechnapole nastavvycty nastavvyplnovelinky nastavvyplnoveradky nastavvystup nastavvzhled nastavzahlavi nastavzakladnifont nastavzarovnani nastavznaceni nastavzuzeni nastrane nejakyradek nekde nextsection nivy nizky nocap nop obrazovka odkaz odkaz odkaznadatum odkaznastranu odkaznatext odsazovani okr opis opissoubor oramovani oref orez otocit overbar overbars overstrike overstrikes oznacverzi paragraph parovastrana part pis placefloat placelistoffloats placelistofsorts placelistofsynonyms placereferencelist pol pole polozka polozky popisky porovnejpaletu porovnejskupinubarev pozadi pozice poznamka poznamkapodcarou pref prelozit premistinamrizku prepninazakladnifont preskoc prizpusobivepole prizpusobvzhled program propojeneznaceni propojenydokument propojenyrejstrik publikace ran ref register reservefloat reset resettextcontent resetznaceni rimskecislice rozdelplvouciobjekt rozpojeneznaceni roztazene schovejbloky section sedabarva seeregister settextcontent setupfonthandling setupfontsynonym setupforms setupinterlinespace2 setupitemgroup setuplistalternative setuppaper setupstrut sloupec slovovpravo sort spodek stanovcharakteristickuseznamu stanovcislonadpisu startalignment startbarva startbuffer startbuffer startcitace startcolumns startcombination startdescription startdocument 
startenumeration startfakt startfigure startfloattext startformula startframedtext starthiding startinteraktivnimenu startitemgroup startkomentar startkomponenta startlegend startline startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginalnilinka startmarginblock startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph startpositioning startpostponing startpozadi startprodukt startprofile startprojekt startprostredi startregister startsymbolset startsynchronization starttable starttables starttabulate starttextovalinka starttyping startunpacked startverze startzhustene stopalignment stopbarva stopbuffer stopbuffer stopcitace stopcolumns stopcombination stopdescription stopdocument stopenumeration stopfakt stopfigure stopfloattext stopformula stopframedtext stophiding stopinteraktivnimenu stopitemgroup stopkomentar stopkomponenta stoplegend stopline stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginalnilinka stopmarginblock stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stoppozadi stopprodukt stopprofile stopprojekt stopprostredi stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptextovalinka stoptyping stopunpacked stopverze stopzhustene strana sub subject subsection subsubject subsubsection subsubsubject sym symbol synchronizacnilista synchronizovat synonym tab tecky tenkalinka tenkelinky tex texthlavicky textovalinka textpopisku title tlacitko tref tvrdemezery typebuffer typstrany ukazbarvu ukazexterniobrazy ukazmrizku ukaznastaveni ukazpaletu ukazpodpery ukazpole ukazpostredizakladnihofontu ukazramecek ukazsadusymbolu ukazskupinubarev ukazupravu ukazvytisk ukazvzhled ukazzakladnifont umistikombinovanyseznam umistikombinovanyseznam umistilegendu umistiloga umistilokalnipoznamkypodcarou umistinadsebe umistinamrizku umistipodrovnici umistipoznamkypodcarou umistirejstrik umistirejstrik umistirovnici umistiseznam umistivedlesebe umistizalozky underbar underbars usedirectory usetypescript usetypescriptfile uzijURL uzijbloky uzijexternidokument uzijexterniobraz uzijexternisoubor uzijexternisoubory uzijexternizvuk uzijkodovani uzijmodul uzijodkazy uzijprikazy uzijspeciality uzijsymbol verze vl vlasovalinka vlevo vpravo vradku vsedniden vyberbloky vyberpapir vyberverzi vyplnenytext vyplnovelinky vyplnovepole vyplnovyradek vysoky zablokujinterakcnimenu zachovejbloky zadnamezera zadnebilemisto zadnedalsibloky zadnedalsisoubory zadnehorniadolniradky zadneodsazovani zadnezahlaviaupati zadneznaceni zadnyseznam zalozka zapisdorejstriku zapisdoseznamu zapisdoseznamuodkazu zapismeziseznam zaramovani zarovnanonastred zarovnanovlevo zarovnanovpravo zasobnikpoli zaznamovepole zhustene ziskejbuffer ziskejznaceni zlomek znaceni znak znaky zpracujbloky zpracujstranu zrcadlit zref \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/cont-de-scite.properties b/Master/texmf-dist/context/data/scite/cont-de-scite.properties index f749bfe9713..b0e05b91aa5 100644 --- a/Master/texmf-dist/context/data/scite/cont-de-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-de-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.de=\ -Buchstabe Buchstaben CAP Cap Caps MONAT \ -Roemischezahlen WOCHENTAG WOERTER WORT Woerter Wort \ -amgitterausrichten amgitterneuausrichten appendix aufseite ausfuellfeld ausfuelltext \ -ausschnitt 
bearbeitebloecke bearbeiteseite behaltebloecke bei benutzekodierung \ -benutzespezielles beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bildschirm \ -blanko bookmark bruch buchstabe buchstaben but \ -bypassblocks cap chapter chem completecombinedlist completelistoffloats \ -completelistofsorts completelistofsynonyms completeregister coupledregister crlf datum \ -definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling defineoutput \ -defineschriftsynonym definetype definetypeface definiereabbsymbol definiereabsaetze definiereabschnitt \ -definiereabschnittsblock definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko definiereblock \ -definiereeinzug definierefarbe definierefarbengruppe definierefeld definierefeldstapel definierefliesstext \ -definierefliesstextumgebung definieregleitobjekt definiereinteraktionsmenue definiereinteraktionsmenue definierekonversion definierelabel \ -definiereliste definierelogo definieren definierenummerierung definiereoverlay definierepalette \ -definierepapierformat definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat \ -definierereferenzliste definiereregister definiereschrift definieresortieren definierestartstop definieresubfeld \ -definieresymbol definieresynonyme definieretabellenvorlage definieretabulator definieretext definieretippen \ -definiereueberschrift definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezusammengestellteliste \ -description doppelseite doppelseitigespapier drehen duennelinie duennerumriss \ -durchgestrichen durchstreichen einezeile einziehen emptylines entknuepfebeschriftung \ -enumeration externeabbildung farbe feld feldstapel festesspatium \ -folgeprofil folgeprofilversion folgeversion footnotetext forceblocks format \ -framedtext fussnote gefuelltesrechteck gefuelltezeile gestreckt gitter \ -graufarbe haarlinie hauptsprache heutigesdatum heutigeskopfnummer hintergrund \ -hl hoch holebeschriftung holepuffer imlinken imrechten \ -imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation ininner \ -inmarginalie inouter installieresprache interaktionsbalken interaktionsknopfe inzeile \ -irgendwo its keinebeschriftung keinebloeckemehr keinedateienmehr keinekopfundfusszeilen \ -keineliste keinspatium keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld \ -knopf kommentar konvertierezahl kopf kopfnummer kopierefeld \ -korrigierezwischenraum labeling labels labeltext linksbuendig listsymbol \ -loadsorts loadsynonyms mapfontsize mar marginallinie marginaltext \ -mathematik mediaeval monat nachunten name nextsection \ -nichteinziehen nocap nop notiz paragraph part \ -passelayoutan passendfeld placefloat placelistoffloats placelistofsorts placelistofsynonyms \ -placereferencelist platzierebookmarks platziereformel platzierefussnoten platzierelegende platziereliste \ -platzierelogo platzierelokalefussnoten platzierenebeneinander platziereregister platziereregister platziereuntereinander \ -platziereunterformel platzierezusammengestellteliste platzierezusammengestellteliste pos position posten \ -programm publikation punkt ran rechteck rechtecke \ -rechtsbuendig ref referenz register registrierefelder reservefloat \ -resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung schreibezumregister schreibezurliste \ -schreibezurreferenzliste schreibezwischenliste section seeregister seite seitenreferenz \ -seitentyp settext 
setupfonthandling setupfontsynonym setupforms setupinterlinespace2 \ -setupitemgroup setuplistalternative setuppaper setupstrut showsymbolset sort \ -spalte spatium spiegeln sprache startalignment startbuffer \ -startbuffer startcolumns startcombination startdescription startdocument startenumeration \ -startfarbe startfigure startfloattext startformula startframedtext startgeg \ -starthiding starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkommentar \ -startkomponente startlegend startline startlinecorrection startlinenumbering startlines \ -startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginallinie startmarginblock \ -startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph \ -startpositioning startpostponing startprodukt startprofile startprojekt startregister \ -startsymbolset startsynchronization starttable starttables starttabulate starttextlinie \ -starttyping startumgebung startunpacked startversion startzitat stelleabsaetzeein \ -stelleabsatznummerierungein stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleausgabeein stelleausrichtungein \ -stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein stellebildunterschriftein stellebindestrichein \ -stelleblankoein stelleblockein stelledrehenein stelleduennerumrissein stelleeinziehenein stelleeinzuegein \ -stelleengerein stelleexterneabbildungenein stellefarbeein stellefarbenein stellefeldein stellefelderin \ -stellefliesstextein stellefliesstextumgebungein stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein \ -stellefusszeilentextein stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein \ -stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein stelleinteraktionsbildschirmein \ -stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein stellekopfzahlein stellekopfzeileein \ -stellekopfzeilentextein stellelabeltextein stellelayoutein stellelegendeein stellelinienbreiteein stellelisteein \ -stellemarginalblockein stellemarginallinieein stellenobenein stellenummerierungein stellepaletteein stellepapierformatein \ -stelleplatziegeteiltegleitobjekt stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein \ -stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein stelleregisterein \ -stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein stellesortierenein stellespaltenein stellespatiumein \ -stellespracheein stellesymbolsetein stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein \ -stelletabein stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein \ -stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein stelleueberschriftein \ -stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein stelleumrahmtetexteein stelleuntenein \ -stelleunterseitennummerein stelleunterstreichenein stelleurlein stelleversalienein stelleversionein stellezeilenabstandein \ -stellezeilenein stellezeilennumerierungein stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment \ -stopbuffer stopbuffer stopcolumns stopcombination stopdescription stopdocument \ -stopenumeration stopfarbe stopfigure 
stopfloattext stopformula stopframedtext \ -stopgeg stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss \ -stopkommentar stopkomponente stoplegend stopline stoplinecorrection stoplinenumbering \ -stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginallinie \ -stopmarginblock stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \ -stopparagraph stoppositioning stoppostponing stopprodukt stopprofile stopprojekt \ -stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptextlinie \ -stoptyping stopumgebung stopunpacked stopversion stopzitat sub \ -subject subsection subsubject subsubsection subsubsubject sym \ -symbol synchronisationsbalken synchronisieren synonym tab teilegleitobjekt \ -tex textlinie textreferenz tief tiho tip \ -tippedatei tippen tippepuffer title tooltip ueber \ -ueberschrifttext uebersetzten ueberstreichen ueberstrichen umrahmt unterstreichen \ -unterstrichen usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe \ -vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version verweis \ -verweisdatum verwendeURL verwendebefehl verwendebloecke verwendeexteresdokument verwendeexterneabbildung \ -verwendeexternedatei verwendeexternedateien verwendeexternestonstueck verwendemodul verwendereferenzen verwendesymbole \ -vl von waehlebloeckeaus waehlepapieraus waehleversionaus wechselezumfliesstext \ -wochentag wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen zeigefarbe \ -zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung zeigegitter zeigelayout \ -zeigepalette zeigerahmen zeigestruts zeigeumbruch zentriert ziffern \ -zitat zitieren zu zurbox zwischenraum \ No newline at end of file +keywordclass.macros.context.de=Buchstabe Buchstaben CAP Cap Caps MONAT Roemischezahlen WOCHENTAG WOERTER WORT Woerter Wort amgitterausrichten amgitterneuausrichten appendix aufseite ausfuellfeld ausfuelltext ausschnitt bearbeitebloecke bearbeiteseite behaltebloecke bei benutzekodierung benutzespezielles beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bildschirm blanko bookmark bruch buchstabe buchstaben but bypassblocks cap chapter chem completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister coupledregister crlf datum definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling defineoutput defineschriftsynonym definetype definetypeface definiereabbsymbol definiereabsaetze definiereabschnitt definiereabschnittsblock definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko definiereblock definiereeinzug definierefarbe definierefarbengruppe definierefeld definierefeldstapel definierefliesstext definierefliesstextumgebung definieregleitobjekt definiereinteraktionsmenue definiereinteraktionsmenue definierekonversion definierelabel definiereliste definierelogo definieren definierenummerierung definiereoverlay definierepalette definierepapierformat definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat definierereferenzliste definiereregister definiereschrift definieresortieren definierestartstop definieresubfeld definieresymbol definieresynonyme definieretabellenvorlage definieretabulator definieretext definieretippen definiereueberschrift definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezusammengestellteliste description 
doppelseite doppelseitigespapier drehen duennelinie duennerumriss einezeile einziehen emptylines entknuepfebeschriftung enumeration externeabbildung farbe feld feldstapel festesspatium folgeprofil folgeprofilversion folgeversion footnotetext forceblocks format framedtext fussnote gefuelltesrechteck gefuelltezeile gestreckt gitter graufarbe haarlinie hauptsprache heutigesdatum heutigeskopfnummer hintergrund hl hoch holebeschriftung holepuffer imlinken imrechten imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation ininner inmarginalie inouter installieresprache interaktionsbalken interaktionsknopfe inzeile irgendwo its keinebeschriftung keinebloeckemehr keinedateienmehr keinekopfundfusszeilen keineliste keinspatium keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld knopf kommentar konvertierezahl kopf kopfnummer kopierefeld korrigierezwischenraum labeling labels labeltext linksbuendig listsymbol loadsorts loadsynonyms mapfontsize mar marginallinie marginaltext mathematik mediaeval monat nachunten name nextsection nichteinziehen nocap nop notiz overbar overbars overstrike overstrikes paragraph part passelayoutan passendfeld placefloat placelistoffloats placelistofsorts placelistofsynonyms placereferencelist platzierebookmarks platziereformel platzierefussnoten platzierelegende platziereliste platzierelogo platzierelokalefussnoten platzierenebeneinander platziereregister platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste platzierezusammengestellteliste pos position posten programm publikation punkt ran rechteck rechtecke rechtsbuendig ref referenz register registrierefelder reservefloat resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung schreibezumregister schreibezurliste schreibezurreferenzliste schreibezwischenliste section seeregister seite seitenreferenz seitentyp settext setupfonthandling setupfontsynonym setupforms setupinterlinespace2 setupitemgroup setuplistalternative setuppaper setupstrut showsymbolset sort spalte spatium spiegeln sprache startalignment startbuffer startbuffer startcolumns startcombination startdescription startdocument startenumeration startfarbe startfigure startfloattext startformula startframedtext startgeg starthiding starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkommentar startkomponente startlegend startline startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginallinie startmarginblock startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph startpositioning startpostponing startprodukt startprofile startprojekt startregister startsymbolset startsynchronization starttable starttables starttabulate starttextlinie starttyping startumgebung startunpacked startversion startzitat stelleabsaetzeein stelleabsatznummerierungein stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleausgabeein stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein stelledrehenein stelleduennerumrissein stelleeinziehenein stelleeinzuegein stelleengerein stelleexterneabbildungenein stellefarbeein stellefarbenein stellefeldein stellefelderin stellefliesstextein stellefliesstextumgebungein stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein stellefusszeilentextein 
stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein stellelayoutein stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein stellemarginallinieein stellenobenein stellenummerierungein stellepaletteein stellepapierformatein stelleplatziegeteiltegleitobjekt stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein stelleregisterein stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein stellesortierenein stellespaltenein stellespatiumein stellespracheein stellesymbolsetein stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein stelletabein stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein stelleurlein stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein stellezeilennumerierungein stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment stopbuffer stopbuffer stopcolumns stopcombination stopdescription stopdocument stopenumeration stopfarbe stopfigure stopfloattext stopformula stopframedtext stopgeg stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss stopkommentar stopkomponente stoplegend stopline stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginallinie stopmarginblock stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stopprodukt stopprofile stopprojekt stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptextlinie stoptyping stopumgebung stopunpacked stopversion stopzitat sub subject subsection subsubject subsubsection subsubsubject sym symbol synchronisationsbalken synchronisieren synonym tab teilegleitobjekt tex textlinie textreferenz tief tiho tip tippedatei tippen tippepuffer title tooltip ueber ueberschrifttext uebersetzten umrahmt underbar underbars usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version verweis verweisdatum verwendeURL verwendebefehl verwendebloecke verwendeexteresdokument verwendeexterneabbildung verwendeexternedatei verwendeexternedateien verwendeexternestonstueck verwendemodul verwendereferenzen verwendesymbole vl von waehlebloeckeaus waehlepapieraus waehleversionaus wechselezumfliesstext wochentag wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen zeigefarbe zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung zeigegitter zeigelayout zeigepalette zeigerahmen zeigestruts zeigeumbruch zentriert ziffern zitat zitieren zu zurbox zwischenraum \ No newline at end of file diff --git 
a/Master/texmf-dist/context/data/scite/cont-en-scite.properties b/Master/texmf-dist/context/data/scite/cont-en-scite.properties index 72414c1132f..a83042bf6eb 100644 --- a/Master/texmf-dist/context/data/scite/cont-en-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-en-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.en=\ -CAP Cap Caps Character Characters MONTH \ -Romannumerals WEEKDAY WORD WORDS Word Words \ -about adaptlayout appendix at atpage background \ -blackrule blackrules blank bookmark but button \ -bypassblocks cap chapter character characters chem \ -clip clonefield color column comment comparecolorgroup \ -comparepalet completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister \ -convertnumber copyfield correctwhitespace coupledocument coupledregister couplemarking \ -couplepage couplepaper coupleregister crlf currentdate currentheadnumber \ -date decouplemarking define defineblank defineblock definebodyfont \ -definebodyfontDEF definebodyfontREF definebodyfontenvironment definebuffer definecolor definecolorgroup \ -definecombinedlist defineconversion definedescription definedfont defineenumeration definefield \ -definefieldstack definefiguresymbol definefloat definefont definefontfeature definefonthandling \ -definefontsynonym defineframed defineframedtext definehead defineindenting defineinteractionmenu \ -defineinteractionmenu definelabel definelist definelogo definemakeup definemarking \ -defineoutput defineoverlay definepalet definepapersize defineparagraphs defineprofile \ -defineprogram definereference definereferenceformat definereferencelist defineregister definesection \ -definesectionblock definesorting definestartstop definesubfield definesymbol definesynonyms \ -definetabletemplate definetabulate definetext definetype definetypeface definetyping \ -defineversion description determineheadnumber determinelistcharacteristics disableinteractionmenu emptylines \ -enumeration externalfigure field fieldstack fillinfield fillinline \ -fillinrules fillintext fitfield fixedspaces followprofile followprofileversion \ -followversion footnote footnotetext forceblocks fraction framed \ -framedtext from getbuffer getmarking godown goto \ -gotobox graycolor grid hairline head headnumber \ -headtext hideblocks high hl in indentation \ -indenting inframed ininner inleft inline inmargin \ -inothermargin inouter inright installlanguage interactionbar interactionbuttons \ -item items its keepblocks labeling labels \ -labeltext language leftaligned listsymbol loadsorts loadsynonyms \ -logfields lohi low mainlanguage mapfontsize mar \ -marginrule margintext marking markversion mathematics mediaeval \ -midaligned mirror month moveongrid name nextsection \ -nocap noheaderandfooterlines noindenting nolist nomarking nomoreblocks \ -nomorefiles nop nospace note notopandbottomlines nowhitespace \ -numbers overbar overbars overstrike overstrikes packed \ -page pagereference pagetype paragraph part periods \ -placebookmarks placecombinedlist placecombinedlist placefloat placefootnotes placeformula \ -placelegend placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \ -placelogos placeongrid placeontopofeachother placereferencelist placeregister placeregister \ -placesidebyside placesubformula position processblocks processpage program \ -publication quotation quote ran ref reference \ -referral referraldate register reservefloat reset resetmarking \ -resettextcontent rightaligned romannumerals 
rotate scale screen \ -section seeregister selectblocks selectpaper selectversion settextcontent \ -setupalign setuparranging setupbackground setupbackgrounds setupblackrules setupblank \ -setupblock setupbodyfont setupbodyfontenvironment setupbottom setupbottomtexts setupbuffer \ -setupbuttons setupcapitals setupcaption setupcaptions setupclipping setupcolor \ -setupcolors setupcolumns setupcombinations setupcombinedlist setupcomment setupdescriptions \ -setupenumerations setupexternalfigures setupfield setupfields setupfillinlines setupfillinrules \ -setupfloat setupfloats setupfloatsplitting setupfonthandling setupfontsynonym setupfooter \ -setupfootertexts setupfootnotedefinition setupfootnotes setupforms setupformulae setupframed \ -setupframedtexts setuphead setupheader setupheadertexts setupheadnumber setupheads \ -setupheadtext setuphyphenmark setupindentations setupindenting setupinmargin setupinteraction \ -setupinteractionbar setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 setupitemgroup \ -setupitems setuplabeltext setuplanguage setuplayout setuplegend setuplinenumbering \ -setuplines setuplinewidth setuplist setuplistalternative setupmakeup setupmarginblocks \ -setupmarginrules setupmarking setupnarrower setupnumbering setupoppositeplacing setupoutput \ -setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper setuppapersize \ -setupparagraphnumbering setupparagraphs setuppositioning setupprofiles setupprograms setuppublications \ -setupquote setupreferencelist setupreferencing setupregister setuprotate setupscreens \ -setupsection setupsectionblock setupsorting setupspacing setupstrut setupsubpagenumber \ -setupsymbolset setupsynchronization setupsynchronizationbar setupsynonyms setupsystem setuptab \ -setuptables setuptabulate setuptext setuptextrules setuptexttexts setupthinrules \ -setuptolerance setuptop setuptoptexts setuptype setuptyping setupunderbar \ -setupurl setupversions setupwhitespace showbodyfont showbodyfontenvironment showcolor \ -showcolorgroup showexternalfigures showfields showframe showgrid showlayout \ -showmakeup showpalet showprint showsetups showstruts showsymbolset \ -someline somewhere sort space splitfloat startalignment \ -startbackground startbuffer startbuffer startcolor startcolumns startcombination \ -startcomment startcomponent startdescription startdocument startenumeration startenvironment \ -startfact startfigure startfloattext startformula startframedtext starthiding \ -startinteractionmenu startitemgroup startlegend startline startlinecorrection startlinenumbering \ -startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \ -startmarginrule startnamemakeup startnarrower startopposite startoverlay startoverview \ -startpacked startparagraph startpositioning startpostponing startproduct startprofile \ -startproject startquotation startregister startsymbolset startsynchronization starttable \ -starttables starttabulate starttextrule starttyping startunpacked startversion \ -stopalignment stopbackground stopbuffer stopbuffer stopcolor stopcolumns \ -stopcombination stopcomment stopcomponent stopdescription stopdocument stopenumeration \ -stopenvironment stopfact stopfigure stopfloattext stopformula stopframedtext \ -stophiding stopinteractionmenu stopitemgroup stoplegend stopline stoplinecorrection \ -stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \ -stopmarginblock stopmarginrule stopnamemakeup 
stopnarrower stopopposite stopoverlay \ -stopoverview stoppacked stopparagraph stoppositioning stoppostponing stopproduct \ -stopprofile stopproject stopquotation stopsymbolset stopsynchronization stoptable \ -stoptables stoptabulate stoptextrule stoptyping stopunpacked stopversion \ -stretched sub subject subsection subsubject subsubsection \ -subsubsubject switchtobodyfont sym symbol synchronizationbar synchronize \ -synonym tab tex textreference textrule thinrule \ -thinrules title tooltip translate typ type \ -typebuffer typefile underbar underbars useURL useblocks \ -usecommands usedirectory useencoding useexternaldocument useexternalfigure useexternalfile \ -useexternalfiles useexternalsoundtrack usemodule usereferences usespecials usesymbols \ -usetypescript usetypescriptfile version vl weekday whitespace \ -wordright writebetweenlist writetolist writetoreferencelist writetoregister \ No newline at end of file +keywordclass.macros.context.en=CAP Cap Caps Character Characters MONTH Romannumerals WEEKDAY WORD WORDS Word Words about adaptlayout appendix at atpage background blackrule blackrules blank bookmark but button bypassblocks cap chapter character characters chem clip clonefield color column comment comparecolorgroup comparepalet completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister convertnumber copyfield correctwhitespace coupledocument coupledregister couplemarking couplepage couplepaper coupleregister crlf currentdate currentheadnumber date decouplemarking define defineblank defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment definebuffer definecolor definecolorgroup definecombinedlist defineconversion definedescription definedfont defineenumeration definefield definefieldstack definefiguresymbol definefloat definefont definefontfeature definefonthandling definefontsynonym defineframed defineframedtext definehead defineindenting defineinteractionmenu defineinteractionmenu definelabel definelist definelogo definemakeup definemarking defineoutput defineoverlay definepalet definepapersize defineparagraphs defineprofile defineprogram definereference definereferenceformat definereferencelist defineregister definesection definesectionblock definesorting definestartstop definesubfield definesymbol definesynonyms definetabletemplate definetabulate definetext definetype definetypeface definetyping defineversion description determineheadnumber determinelistcharacteristics disableinteractionmenu emptylines enumeration externalfigure field fieldstack fillinfield fillinline fillinrules fillintext fitfield fixedspaces followprofile followprofileversion followversion footnote footnotetext forceblocks fraction framed framedtext from getbuffer getmarking godown goto gotobox graycolor grid hairline head headnumber headtext hideblocks high hl in indentation indenting inframed ininner inleft inline inmargin inothermargin inouter inright installlanguage interactionbar interactionbuttons item items its keepblocks labeling labels labeltext language leftaligned listsymbol loadsorts loadsynonyms logfields lohi low mainlanguage mapfontsize mar marginrule margintext marking markversion mathematics mediaeval midaligned mirror month moveongrid name nextsection nocap noheaderandfooterlines noindenting nolist nomarking nomoreblocks nomorefiles nop nospace note notopandbottomlines nowhitespace numbers overbar overbars overstrike overstrikes packed page pagereference pagetype paragraph part periods placebookmarks 
placecombinedlist placecombinedlist placefloat placefootnotes placeformula placelegend placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes placelogos placeongrid placeontopofeachother placereferencelist placeregister placeregister placesidebyside placesubformula position processblocks processpage program publication quotation quote ran ref reference referral referraldate register reservefloat reset resetmarking resettextcontent rightaligned romannumerals rotate scale screen section seeregister selectblocks selectpaper selectversion settextcontent setupalign setuparranging setupbackground setupbackgrounds setupblackrules setupblank setupblock setupbodyfont setupbodyfontenvironment setupbottom setupbottomtexts setupbuffer setupbuttons setupcapitals setupcaption setupcaptions setupclipping setupcolor setupcolors setupcolumns setupcombinations setupcombinedlist setupcomment setupdescriptions setupenumerations setupexternalfigures setupfield setupfields setupfillinlines setupfillinrules setupfloat setupfloats setupfloatsplitting setupfonthandling setupfontsynonym setupfooter setupfootertexts setupfootnotedefinition setupfootnotes setupforms setupformulae setupframed setupframedtexts setuphead setupheader setupheadertexts setupheadnumber setupheads setupheadtext setuphyphenmark setupindentations setupindenting setupinmargin setupinteraction setupinteractionbar setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 setupitemgroup setupitems setuplabeltext setuplanguage setuplayout setuplegend setuplinenumbering setuplines setuplinewidth setuplist setuplistalternative setupmakeup setupmarginblocks setupmarginrules setupmarking setupnarrower setupnumbering setupoppositeplacing setupoutput setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper setuppapersize setupparagraphnumbering setupparagraphs setuppositioning setupprofiles setupprograms setuppublications setupquote setupreferencelist setupreferencing setupregister setuprotate setupscreens setupsection setupsectionblock setupsorting setupspacing setupstrut setupsubpagenumber setupsymbolset setupsynchronization setupsynchronizationbar setupsynonyms setupsystem setuptab setuptables setuptabulate setuptext setuptextrules setuptexttexts setupthinrules setuptolerance setuptop setuptoptexts setuptype setuptyping setupunderbar setupurl setupversions setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup showexternalfigures showfields showframe showgrid showlayout showmakeup showpalet showprint showsetups showstruts showsymbolset someline somewhere sort space splitfloat startalignment startbackground startbuffer startbuffer startcolor startcolumns startcombination startcomment startcomponent startdescription startdocument startenumeration startenvironment startfact startfigure startfloattext startformula startframedtext starthiding startinteractionmenu startitemgroup startlegend startline startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock startmarginrule startnamemakeup startnarrower startopposite startoverlay startoverview startpacked startparagraph startpositioning startpostponing startproduct startprofile startproject startquotation startregister startsymbolset startsynchronization starttable starttables starttabulate starttextrule starttyping startunpacked startversion stopalignment stopbackground stopbuffer stopbuffer stopcolor stopcolumns stopcombination 
stopcomment stopcomponent stopdescription stopdocument stopenumeration stopenvironment stopfact stopfigure stopfloattext stopformula stopframedtext stophiding stopinteractionmenu stopitemgroup stoplegend stopline stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmarginrule stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stoppacked stopparagraph stoppositioning stoppostponing stopproduct stopprofile stopproject stopquotation stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptextrule stoptyping stopunpacked stopversion stretched sub subject subsection subsubject subsubsection subsubsubject switchtobodyfont sym symbol synchronizationbar synchronize synonym tab tex textreference textrule thinrule thinrules title tooltip translate typ type typebuffer typefile underbar underbars useURL useblocks usecommands usedirectory useencoding useexternaldocument useexternalfigure useexternalfile useexternalfiles useexternalsoundtrack usemodule usereferences usespecials usesymbols usetypescript usetypescriptfile version vl weekday whitespace wordright writebetweenlist writetolist writetoreferencelist writetoregister \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/cont-fr-scite.properties b/Master/texmf-dist/context/data/scite/cont-fr-scite.properties index caa7069dbb0..606be2efaba 100644 --- a/Master/texmf-dist/context/data/scite/cont-fr-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-fr-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.fr=\ -CAP Cap Caps Caractere Caracteres Chiffresromains \ -JOURSEMAINE MOIS MOT MOTS Mot Mots \ -a adaptedisposition ajustechamp alaligne alapage aligneadroite \ -aligneagauche aligneaumilieu appendix arriereplan baha barreinteraction \ -barresynchronisation bas bouton boutonsinteraction but cacheblocs \ -cap caractere caracteres champ changepolicecorps chapter \ -chem chiffresromains citation citer clip clonechamp \ -colonne commentaire comparegroupecouleur comparepalette completecombinedlist completelistoffloats \ -completelistofsorts completelistofsynonyms completeregistre composeenalinea concernant convertitnumero \ -copitchamp corrigeespaceblanc couleur couleurgrise coupledocument coupledregister \ -couplemarquage couplepapier coupleregistre crlf dactylographier dans \ -dansautremarge dansdroite dansgauche dansmarge date datecourante \ -daterecommandation de decouplemarquage definebodyfontDEF definebodyfontREF definedfont \ -definefontfeature definefonthandling defineframed defineframedtext definetypeface definit \ -definitbloc definitblocsection definitbuffer definitchamp definitcompoalinea definitconversion \ -definitcouleur definitdactylo definitdemarrestoppe definitdescription definitenumeration definitenvironnementpolicecorps \ -definitetiquette definitflottant definitformatreference definitgroupecouleur definitliste definitlisteimbriquee \ -definitlistereference definitlogo definitmakeup definitmarquage definitmenuinteraction definitmenuinteraction \ -definitnotepdp definitpalette definitparagraphes definitpilechamp definitpolice definitpolicecorps \ -definitprofil definitprogramme definitreference definitregistre definitrevetement definitsection \ -definitsortie definitsouschamp definitsymbole definitsymbolefigure definitsynonymepolice definitsynonymes \ -definittabulation definittaillepapier definittete definittexte definittrametableau definittri \ -definittype definitversion definitvide 
definitvide demarreciter deplacesurgrille \ -description determinecaracteristiqueliste determinenumerotete echelle ecran ecritdansliste \ -ecritdanslistereference ecritentreliste ecritregistre el element elements \ -emptylines enumeration espace espaceblanc espacesfixes etiquettes \ -etire fichierdactylo figureexterne forceblocs fraction framed \ -framedtext gardeblocs grille groupe haut hl \ -indentation inframed inhibemenuinteraction ininner inouter installelangue \ -joursemaine labeling labeltexte langue langueprincipale ligneh \ -lignenoire ligneregleetexte lignesnoires listesymbole loadsorts loadsynonyms \ -logchamp mapfontsize mar margereglee marquage marquageversion \ -marquepage mathematique mediaeval mois montrecadre montrechamps \ -montrecouleur montredisposition montreedition montreenvironnementpolicecorps montrefiguresexternes montregrille \ -montregroupecouleur montrejeusymboles montremakeup montrepalette montrepolicecorps montrereglages \ -montrestruts motdroit name nextsection nocap nop \ -note notepdp numeros numerotete numerotetecourant obtientmarquage \ -oriente overbar overbars page pagedouble paragraph \ -part pasplusdeblocs pasplusdefichiers periodes pilechamp placecoteacote \ -placeflottant placeformule placelegende placelesunsaudessusdesautres placeliste placelisteinmbriquee \ -placelisteinmbriquee placelistereference placelistoffloats placelistofsorts placelistofsynonyms placelogos \ -placemarquespages placenotespdp placenotespdplocales placeregistre placeregistre placesousformule \ -placesurgrille position prendbuffer programme publication qqpart \ -ran raz razmarquage recommandation ref reference \ -referencepage referencetexte reflete register reglealignement reglealineas \ -reglearrangement reglearriereplan reglearriereplans reglebarreinteraction reglebarresynchronisation reglebloc \ -regleblocmarge regleblocsection regleboutons reglebuffer reglecapitales reglechamp \ -reglechamps regleclipping reglecolonnes reglecombinaisons reglecommentaire reglecompoetroite \ -reglecomposeenalinea reglecouleur reglecouleurs regledactylo regledansmarge regledescriptions \ -regledisposition regleecraninteraction regleecrans regleelements regleencadre regleentete \ -regleenumerations regleenvironnementpolicecorps regleepaisseurligne regleespaceblanc regleespacement regleespacementinterligne \ -reglefiguresexternes regleflottant regleflottants regleformulaires regleformules regleinf \ -regleinteraction regleintitule regleintitules reglejeusymboles reglelabeltexte reglelangue \ -reglelegende reglelignes reglelignesnoires reglelignesreglestexte regleliste reglelisteimbriquee \ -reglelistereference reglemakeup reglemargereglee reglemarquage reglemarquagehyphenation reglemenuinteraction \ -reglenotepdp reglenumeropage reglenumerotation reglenumerotationligne reglenumerotationpage reglenumerotationparagraphe \ -reglenumerotete regleoriente reglepalette reglepapier regleparagraphes reglepdp \ -regleplacementopposition reglepolicecorps reglepositionnement regleprofils regleprogrammes reglepublications \ -reglereferencage regleregistre regleremplitligne regleremplitlignesreglees reglesection regleseparationflottant \ -reglesortie reglesouslignage reglesousnumeropage reglestrut reglesup reglesynchronisation \ -reglesynonymes reglesysteme regletab regletableaux regletabulation regletaillepapier \ -regletete regletetes regletexte regletextesentete regletextesinf regletextespdp \ -regletextessup regletextestexte regletextetete regletolerance regletraitsfins regletransitionspage \ -regletri 
regletype regleurl regleversions remplitchamp remplitligne \ -remplitlignesreglees remplittexte reservefloat resettextcontent sansalinea sansespace \ -sansespaceblanc sanslignesenteteetpdp sanslignessupetinf sansliste sansmarquage sauteblocs \ -section seeregister selectionneblocs selectionnepapier selectionneversion separeflottant \ -settext setupfonthandling setupfontsynonym setupframedtexts setupinterlinespace2 setupitemgroup \ -setuplistalternative sort sousligne souslignetout startalignment startarriereplan \ -startbuffer startbuffer startcitation startcolumns startcombination startcommentaire \ -startcomposant startcouleur startdescription startdocument startenumeration startenvironement \ -startfait startfigure startfloattext startformula startframedtext startgroupe \ -starthiding startitemgroup startlegend startligneregleetexte startline startlinecorrection \ -startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup \ -startmargereglee startmarginblock startmenuinteraction startnamemakeup startnarrower startopposite \ -startoverlay startoverview startparagraph startpositioning startpostponing startproduit \ -startprofile startprojet startregister startsymbolset startsynchronization starttable \ -starttables starttabulate starttyping startunpacked startversion stopalignment \ -stoparriereplan stopbuffer stopbuffer stopcitation stopcolumns stopcombination \ -stopcommentaire stopcomposant stopcouleur stopdescription stopdocument stopenumeration \ -stopenvironement stopfait stopfigure stopfloattext stopformula stopframedtext \ -stopgroupe stophiding stopitemgroup stoplegend stopligneregleetexte stopline \ -stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes \ -stopmakeup stopmargereglee stopmarginblock stopmenuinteraction stopnamemakeup stopnarrower \ -stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \ -stopproduit stopprofile stopprojet stopsymbolset stopsynchronization stoptable \ -stoptables stoptabulate stoptyping stopunpacked stopversion sub \ -subject subsection subsubject subsubsection subsubsubject suggestion \ -suivantprofil suivantversion suivantversionprofil surligne surlignetout sym \ -symbole synchronise synonym tab tapebuffer tapepage \ -tete tex textemarge textenotepdp textetete title \ -traduire traiteblocs traitepage traitfin traitsfins typ \ -uneligne usedirectory usetypescript usetypescriptfile utiliseURL utiliseblocs \ -utilisecommandes utilisedocumentexterne utiliseencodage utilisefichierexterne utilisefichiersexternes utilisefigureexterne \ -utilisemodule utilisepsiteaudioexterne utilisereferences utilisespecialites utilisesymboles va \ -vaalaboite vaenbas version vide vl \ No newline at end of file +keywordclass.macros.context.fr=CAP Cap Caps Caractere Caracteres Chiffresromains JOURSEMAINE MOIS MOT MOTS Mot Mots a adaptedisposition ajustechamp alaligne alapage aligneadroite aligneagauche aligneaumilieu appendix arriereplan baha barreinteraction barresynchronisation bas bouton boutonsinteraction but cacheblocs cap caractere caracteres champ changepolicecorps chapter chem chiffresromains citation citer clip clonechamp colonne commentaire comparegroupecouleur comparepalette completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregistre composeenalinea concernant convertitnumero copitchamp corrigeespaceblanc couleur couleurgrise coupledocument coupledregister couplemarquage couplepapier coupleregistre crlf 
dactylographier dans dansautremarge dansdroite dansgauche dansmarge date datecourante daterecommandation de decouplemarquage definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling defineframed defineframedtext definetypeface definit definitbloc definitblocsection definitbuffer definitchamp definitcompoalinea definitconversion definitcouleur definitdactylo definitdemarrestoppe definitdescription definitenumeration definitenvironnementpolicecorps definitetiquette definitflottant definitformatreference definitgroupecouleur definitliste definitlisteimbriquee definitlistereference definitlogo definitmakeup definitmarquage definitmenuinteraction definitmenuinteraction definitnotepdp definitpalette definitparagraphes definitpilechamp definitpolice definitpolicecorps definitprofil definitprogramme definitreference definitregistre definitrevetement definitsection definitsortie definitsouschamp definitsymbole definitsymbolefigure definitsynonymepolice definitsynonymes definittabulation definittaillepapier definittete definittexte definittrametableau definittri definittype definitversion definitvide definitvide demarreciter deplacesurgrille description determinecaracteristiqueliste determinenumerotete echelle ecran ecritdansliste ecritdanslistereference ecritentreliste ecritregistre el element elements emptylines enumeration espace espaceblanc espacesfixes etiquettes etire fichierdactylo figureexterne forceblocs fraction framed framedtext gardeblocs grille groupe haut hl indentation inframed inhibemenuinteraction ininner inouter installelangue joursemaine labeling labeltexte langue langueprincipale ligneh lignenoire ligneregleetexte lignesnoires listesymbole loadsorts loadsynonyms logchamp mapfontsize mar margereglee marquage marquageversion marquepage mathematique mediaeval mois montrecadre montrechamps montrecouleur montredisposition montreedition montreenvironnementpolicecorps montrefiguresexternes montregrille montregroupecouleur montrejeusymboles montremakeup montrepalette montrepolicecorps montrereglages montrestruts motdroit name nextsection nocap nop note notepdp numeros numerotete numerotetecourant obtientmarquage oriente overbar overbars overstrike overstrikes page pagedouble paragraph part pasplusdeblocs pasplusdefichiers periodes pilechamp placecoteacote placeflottant placeformule placelegende placelesunsaudessusdesautres placeliste placelisteinmbriquee placelisteinmbriquee placelistereference placelistoffloats placelistofsorts placelistofsynonyms placelogos placemarquespages placenotespdp placenotespdplocales placeregistre placeregistre placesousformule placesurgrille position prendbuffer programme publication qqpart ran raz razmarquage recommandation ref reference referencepage referencetexte reflete register reglealignement reglealineas reglearrangement reglearriereplan reglearriereplans reglebarreinteraction reglebarresynchronisation reglebloc regleblocmarge regleblocsection regleboutons reglebuffer reglecapitales reglechamp reglechamps regleclipping reglecolonnes reglecombinaisons reglecommentaire reglecompoetroite reglecomposeenalinea reglecouleur reglecouleurs regledactylo regledansmarge regledescriptions regledisposition regleecraninteraction regleecrans regleelements regleencadre regleentete regleenumerations regleenvironnementpolicecorps regleepaisseurligne regleespaceblanc regleespacement regleespacementinterligne reglefiguresexternes regleflottant regleflottants regleformulaires regleformules regleinf regleinteraction regleintitule regleintitules 
reglejeusymboles reglelabeltexte reglelangue reglelegende reglelignes reglelignesnoires reglelignesreglestexte regleliste reglelisteimbriquee reglelistereference reglemakeup reglemargereglee reglemarquage reglemarquagehyphenation reglemenuinteraction reglenotepdp reglenumeropage reglenumerotation reglenumerotationligne reglenumerotationpage reglenumerotationparagraphe reglenumerotete regleoriente reglepalette reglepapier regleparagraphes reglepdp regleplacementopposition reglepolicecorps reglepositionnement regleprofils regleprogrammes reglepublications reglereferencage regleregistre regleremplitligne regleremplitlignesreglees reglesection regleseparationflottant reglesortie reglesouslignage reglesousnumeropage reglestrut reglesup reglesynchronisation reglesynonymes reglesysteme regletab regletableaux regletabulation regletaillepapier regletete regletetes regletexte regletextesentete regletextesinf regletextespdp regletextessup regletextestexte regletextetete regletolerance regletraitsfins regletransitionspage regletri regletype regleurl regleversions remplitchamp remplitligne remplitlignesreglees remplittexte reservefloat resettextcontent sansalinea sansespace sansespaceblanc sanslignesenteteetpdp sanslignessupetinf sansliste sansmarquage sauteblocs section seeregister selectionneblocs selectionnepapier selectionneversion separeflottant settext setupfonthandling setupfontsynonym setupframedtexts setupinterlinespace2 setupitemgroup setuplistalternative sort startalignment startarriereplan startbuffer startbuffer startcitation startcolumns startcombination startcommentaire startcomposant startcouleur startdescription startdocument startenumeration startenvironement startfait startfigure startfloattext startformula startframedtext startgroupe starthiding startitemgroup startlegend startligneregleetexte startline startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmargereglee startmarginblock startmenuinteraction startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph startpositioning startpostponing startproduit startprofile startprojet startregister startsymbolset startsynchronization starttable starttables starttabulate starttyping startunpacked startversion stopalignment stoparriereplan stopbuffer stopbuffer stopcitation stopcolumns stopcombination stopcommentaire stopcomposant stopcouleur stopdescription stopdocument stopenumeration stopenvironement stopfait stopfigure stopfloattext stopformula stopframedtext stopgroupe stophiding stopitemgroup stoplegend stopligneregleetexte stopline stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmargereglee stopmarginblock stopmenuinteraction stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stopproduit stopprofile stopprojet stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptyping stopunpacked stopversion sub subject subsection subsubject subsubsection subsubsubject suggestion suivantprofil suivantversion suivantversionprofil sym symbole synchronise synonym tab tapebuffer tapepage tete tex textemarge textenotepdp textetete title traduire traiteblocs traitepage traitfin traitsfins typ underbar underbars uneligne usedirectory usetypescript usetypescriptfile utiliseURL utiliseblocs utilisecommandes utilisedocumentexterne utiliseencodage utilisefichierexterne utilisefichiersexternes utilisefigureexterne 
utilisemodule utilisepsiteaudioexterne utilisereferences utilisespecialites utilisesymboles va vaalaboite vaenbas version vide vl \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/cont-it-scite.properties b/Master/texmf-dist/context/data/scite/cont-it-scite.properties index 0a6a31425f2..da5df24007d 100644 --- a/Master/texmf-dist/context/data/scite/cont-it-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-it-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.it=\ -CAP Cap Caps GIORNOSETTIMANA Lettera Lettere \ -MESE Numeriromani PAROLA PAROLE Parola Parole \ -accoppiacarta accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo \ -adattalayout al allineacentro allineadestra allineasinistra ap \ -apagina appendix barrainterazione barrasincronizzazione bastablocchi bastafile \ -cambiaafontdeltesto campi camporiempimento cap capello chapter \ -chim circondato citazione clip clonacampo colonna \ -colore coloregrigio commento completecombinedlist completelistoffloats completelistofsorts \ -completelistofsynonyms completeregister confrontagruppocolori confrontatavolozza convertinumero copiacampo \ -correggispaziobianco coupledregister crlf da daqualcheparte data \ -datadioggi datareferral definebodyfontDEF definebodyfontREF definedfont definefontfeature \ -definefonthandling definetypeface definisci definisciambientefontdeltesto definisciblocco definiscibloccosezione \ -definiscibuffer definiscicampo definiscicapoversi definiscicolore definisciconversione definiscidescrizione \ -definiscidimensionicarta definiscielenco definiscielencocombinato definiscienumerazione definiscietichetta definiscifigurasimbolo \ -definiscifont definiscifontdeltesto definisciformatoriferimento definiscigruppocolori definisciincorniciato definisciiniziatermina \ -definiscilistariferimenti definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimenuinterazione \ -definiscimodellotabella definiscioggettomobile definisciordinamento definiscioutput definisciprofilo definisciprogramma \ -definisciregistro definiscirientro definisciriferimento definiscirigovuoto definiscisezione definiscisimbolo \ -definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione definiscistackcampi definiscitabulato \ -definiscitavolozza definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping \ -definisciversion description determinacarattersticheelenco determinanumerotesta disabilitamenuinterazione el \ -elaborablocchi elaborapagina elementi elemento emptylines enumeration \ -etichette figuraesterna fondo forzablocchi framedtext frazione \ -giornosettimana griglia hl impaccato impostaallineamento impostaambientefontdeltesto \ -impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco impostabloccosezione \ -impostabuffer impostacampi impostacampo impostacapoversi impostacaption impostacaptions \ -impostacima impostaclippling impostacolonne impostacolore impostacolori impostacombinazioni \ -impostacommento impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelenco \ -impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo impostafontdeltesto impostaforms \ -impostaformule impostaincorniciato impostainmargine impostainstestazione impostainterazione impostainterlinea \ -impostalayout impostalegenda impostalineemargine impostalineenere impostalineeriempimento impostalineesottili \ 
-impostalineetesto impostalingua impostalistariferimenti impostamaiuscole impostamakeup impostamarcatura \ -impostamenuinterazione impostamenzione impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina \ -impostanumerazionerighe impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile \ -impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto impostaposizionamento \ -impostaposizionamentoopposti impostaprofili impostaprogrammi impostapubblicazioni impostapulsanti impostaregistro \ -impostarientri impostarientro impostariferimento impostarighe impostarigheriempimento impostarigovuoto \ -impostarotazione impostaschermi impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione \ -impostasfondi impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea \ -impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab impostatabelle \ -impostatabulato impostatavolozza impostatesta impostateste impostatesticima impostatestifondo \ -impostatestiincorniciati impostatestiintestazioni impostatestipdp impostatesto impostatestoetichette impostatestointestazioni \ -impostatestotesti impostatolleranza impostatransizionepagina impostatype impostatyping impostaurl \ -impostaversioni in inaltromargine incorniciato indentation indestra \ -ininner inmargine inouter inriga insinistra installalingua \ -intorno labeling lettera lettere lineamargine lineanera \ -lineasottile lineatesto lineenere lineeriempimento lineesottili lingua \ -linguaprincipale listsymbol loadsorts loadsynonyms logcampi mapfontsize \ -mar marcatura marcaversione matematica mediaeval menzione \ -mese mettielenco mettielencocombinato mettielencocombinato mettifiancoafianco mettiformula \ -mettiingriglia mettilegenda mettiloghi mettinotepdp mettinotepdplocali mettiregistro \ -mettiregistro mettisegnalibro mettisottoformula mettiunosullaltro mostraambientefontdeltesto mostracampi \ -mostracolore mostracornice mostrafiguresterne mostrafontdeltesto mostragriglia mostragruppocolori \ -mostraimpostazioni mostralyout mostramakeup mostrasetsimboli mostrastampa mostrastruts \ -mostratavolozza name nascondiblocchi nextsection nienteelenco nientelineecimafondo \ -nientelineintestazionepdp nientemarcatura nienterientro nientespazio nientespaziobianco nocap \ -nop nota notapdp numeri numeriromani numerotesta \ -numerotestacorrente pagina paragraph paroladestra part ped \ -pedap placefloat placelistoffloats placelistofsorts placelistofsynonyms placereferencelist \ -posizione prendibuffer prendimarcatura programma pubblicazione pulsante \ -pulsantinterazione punti qualcheriga ran referral register \ -reimposta reimpostamarcatura reservefloat resettextcontent rientro rif \ -riferimento riferimentopagina riferimentotesto riflessione rigariempimento rigovuoto \ -ruota saltablocchi sbarrati sbarrato scala schermo \ -scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro section seeregister \ -segnalibro seguiprofilo seguiversione seguiversioneprofilo selezionablocchi selezionacarta \ -selezionaversione separamarcatura settext setupfonthandling setupfontsynonym setupinterlinespace2 \ -setupitemgroup setuplistalternative setuppaper sfondo sim simbolo \ -sincronizza sopralinea sopralinee sort sottolinea sottolinee \ -spazifissi spazio spaziobianco spezzaoggettomobile spostaagriglia stackcampi \ -startalignment startambiente startbuffer startbuffer 
startcitazione startcolore \ -startcolumns startcombination startcommento startcomponenet startdescription startdocument \ -startenumeration startfatto startfigure startfloattext startformula startframedtext \ -starthiding startimpaccato startitemgroup startlegend startline startlineamargine \ -startlineatesto startlinecorrection startlinenumbering startlines startlocal startlocalenvironment \ -startlocalfootnotes startmakeup startmarginblock startmenuinterattivo startnamemakeup startnarrower \ -startopposite startoverlay startoverview startparagraph startpositioning startpostponing \ -startprodotto startprofile startprogetto startregister startsfondo startsymbolset \ -startsynchronization starttable starttables starttabulate starttyping startunpacked \ -startversione stirato stopalignment stopambiente stopbuffer stopbuffer \ -stopcitazione stopcolore stopcolumns stopcombination stopcommento stopcomponenet \ -stopdescription stopdocument stopenumeration stopfatto stopfigure stopfloattext \ -stopformula stopframedtext stophiding stopimpaccato stopitemgroup stoplegend \ -stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering stoplines \ -stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmenuinterattivo \ -stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph \ -stoppositioning stoppostponing stopprodotto stopprofile stopprogetto stopsfondo \ -stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptyping \ -stopunpacked stopversione sub subject subsection subsubject \ -subsubsection subsubsubject synonym tab testa testoetichetta \ -testoinmargine testoinstestazioni testonotapdp testoriempimento tex tieniblocchi \ -tipopagina title tooltip traduci typ type \ -typebuffer typefile usaURL usablocco usacodifica usacolonnasonoraesterna \ -usacomandi usadocumentoesterno usafiguraesterna usafileesterni usafileesterno usamodulo \ -usariferimenti usasimboli usaspecialita usedirectory usetypescript usetypescriptfile \ -vaia vaiabox vaigiu versione vl \ No newline at end of file +keywordclass.macros.context.it=CAP Cap Caps GIORNOSETTIMANA Lettera Lettere MESE Numeriromani PAROLA PAROLE Parola Parole accoppiacarta accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo adattalayout al allineacentro allineadestra allineasinistra ap apagina appendix barrainterazione barrasincronizzazione bastablocchi bastafile cambiaafontdeltesto campi camporiempimento cap capello chapter chim circondato citazione clip clonacampo colonna colore coloregrigio commento completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister confrontagruppocolori confrontatavolozza convertinumero copiacampo correggispaziobianco coupledregister crlf da daqualcheparte data datadioggi datareferral definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling definetypeface definisci definisciambientefontdeltesto definisciblocco definiscibloccosezione definiscibuffer definiscicampo definiscicapoversi definiscicolore definisciconversione definiscidescrizione definiscidimensionicarta definiscielenco definiscielencocombinato definiscienumerazione definiscietichetta definiscifigurasimbolo definiscifont definiscifontdeltesto definisciformatoriferimento definiscigruppocolori definisciincorniciato definisciiniziatermina definiscilistariferimenti definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimenuinterazione definiscimodellotabella 
definiscioggettomobile definisciordinamento definiscioutput definisciprofilo definisciprogramma definisciregistro definiscirientro definisciriferimento definiscirigovuoto definiscisezione definiscisimbolo definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione definiscistackcampi definiscitabulato definiscitavolozza definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping definisciversion description determinacarattersticheelenco determinanumerotesta disabilitamenuinterazione el elaborablocchi elaborapagina elementi elemento emptylines enumeration etichette figuraesterna fondo forzablocchi framedtext frazione giornosettimana griglia hl impaccato impostaallineamento impostaambientefontdeltesto impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco impostabloccosezione impostabuffer impostacampi impostacampo impostacapoversi impostacaption impostacaptions impostacima impostaclippling impostacolonne impostacolore impostacolori impostacombinazioni impostacommento impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo impostafontdeltesto impostaforms impostaformule impostaincorniciato impostainmargine impostainstestazione impostainterazione impostainterlinea impostalayout impostalegenda impostalineemargine impostalineenere impostalineeriempimento impostalineesottili impostalineetesto impostalingua impostalistariferimenti impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione impostamenzione impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina impostanumerazionerighe impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto impostaposizionamento impostaposizionamentoopposti impostaprofili impostaprogrammi impostapubblicazioni impostapulsanti impostaregistro impostarientri impostarientro impostariferimento impostarighe impostarigheriempimento impostarigovuoto impostarotazione impostaschermi impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione impostasfondi impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab impostatabelle impostatabulato impostatavolozza impostatesta impostateste impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni impostatestipdp impostatesto impostatestoetichette impostatestointestazioni impostatestotesti impostatolleranza impostatransizionepagina impostatype impostatyping impostaurl impostaversioni in inaltromargine incorniciato indentation indestra ininner inmargine inouter inriga insinistra installalingua intorno labeling lettera lettere lineamargine lineanera lineasottile lineatesto lineenere lineeriempimento lineesottili lingua linguaprincipale listsymbol loadsorts loadsynonyms logcampi mapfontsize mar marcatura marcaversione matematica mediaeval menzione mese mettielenco mettielencocombinato mettielencocombinato mettifiancoafianco mettiformula mettiingriglia mettilegenda mettiloghi mettinotepdp mettinotepdplocali mettiregistro mettiregistro mettisegnalibro mettisottoformula mettiunosullaltro mostraambientefontdeltesto mostracampi mostracolore mostracornice mostrafiguresterne 
mostrafontdeltesto mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup mostrasetsimboli mostrastampa mostrastruts mostratavolozza name nascondiblocchi nextsection nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura nienterientro nientespazio nientespaziobianco nocap nop nota notapdp numeri numeriromani numerotesta numerotestacorrente overbar overbars overstrike overstrikes pagina paragraph paroladestra part ped pedap placefloat placelistoffloats placelistofsorts placelistofsynonyms placereferencelist posizione prendibuffer prendimarcatura programma pubblicazione pulsante pulsantinterazione punti qualcheriga ran referral register reimposta reimpostamarcatura reservefloat resettextcontent rientro rif riferimento riferimentopagina riferimentotesto riflessione rigariempimento rigovuoto ruota saltablocchi scala schermo scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro section seeregister segnalibro seguiprofilo seguiversione seguiversioneprofilo selezionablocchi selezionacarta selezionaversione separamarcatura settext setupfonthandling setupfontsynonym setupinterlinespace2 setupitemgroup setuplistalternative setuppaper sfondo sim simbolo sincronizza sort spazifissi spazio spaziobianco spezzaoggettomobile spostaagriglia stackcampi startalignment startambiente startbuffer startbuffer startcitazione startcolore startcolumns startcombination startcommento startcomponenet startdescription startdocument startenumeration startfatto startfigure startfloattext startformula startframedtext starthiding startimpaccato startitemgroup startlegend startline startlineamargine startlineatesto startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock startmenuinterattivo startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph startpositioning startpostponing startprodotto startprofile startprogetto startregister startsfondo startsymbolset startsynchronization starttable starttables starttabulate starttyping startunpacked startversione stirato stopalignment stopambiente stopbuffer stopbuffer stopcitazione stopcolore stopcolumns stopcombination stopcommento stopcomponenet stopdescription stopdocument stopenumeration stopfatto stopfigure stopfloattext stopformula stopframedtext stophiding stopimpaccato stopitemgroup stoplegend stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmenuinterattivo stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stopprodotto stopprofile stopprogetto stopsfondo stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptyping stopunpacked stopversione sub subject subsection subsubject subsubsection subsubsubject synonym tab testa testoetichetta testoinmargine testoinstestazioni testonotapdp testoriempimento tex tieniblocchi tipopagina title tooltip traduci typ type typebuffer typefile underbar underbars usaURL usablocco usacodifica usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna usafileesterni usafileesterno usamodulo usariferimenti usasimboli usaspecialita usedirectory usetypescript usetypescriptfile vaia vaiabox vaigiu versione vl \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/cont-nl-scite.properties b/Master/texmf-dist/context/data/scite/cont-nl-scite.properties 
index e3254a5b489..688c8a7c034 100644 --- a/Master/texmf-dist/context/data/scite/cont-nl-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-nl-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.nl=\ -CAP Cap Caps Letter Letters MAAND \ -Romeins WEEKDAG WOORD WOORDEN Woord Woorden \ -about achtergrond appendix bepaalkopnummer bepaallijstkenmerken blanko \ -blokje blokjes blokkeerinteractiemenu bookmark breuk but \ -button cap chapter chem cijfers citaat \ -citeer clip commentaar completecombinedlist completelistoffloats completelistofsorts \ -completelistofsynonyms converteernummer copieerveld corrigeerwitruimte coupledregister crlf \ -datum definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling \ -definetypeface definieer definieeralineas definieerbeeldmerk definieerblanko definieerblok \ -definieerbuffer definieerconversie definieerfiguursymbool definieerfont definieerfontsynoniem definieerinteractiemenu \ -definieerinteractiemenu definieerkadertekst definieerkleur definieerkleurgroep definieerkop definieerkorps \ -definieerkorpsomgeving definieerlijst definieermarkering definieeromlijnd definieeropmaak definieeroverlay \ -definieerpalet definieerpapierformaat definieerplaatsblok definieerprofiel definieerprogramma definieerreferentie \ -definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst definieersectie definieersectieblok \ -definieersorteren definieerstartstop definieersubveld definieersymbool definieersynoniemen definieertabelvorm \ -definieertabulatie definieertekst definieertype definieertypen definieeruitvoer definieerveld \ -definieerveldstapel definieerversie description doordefinieren doorlabelen doornummeren \ -doorspringen doorstreep doorstrepen dunnelijn dunnelijnen eenregel \ -enumeration ergens externfiguur forceerblokken framedtext gebruikURL \ -gebruikblokken gebruikcommandos gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur \ -gebruikexterngeluidsfragment gebruikmodule gebruikreferenties gebruikspecials gebruiksymbolen gebruiktypescript \ -gebruiktypescriptfile geenblokkenmeer geenbovenenonderregels geenfilesmeer geenhoofdenvoetregels geenlijst \ -geenmarkering geenspatie geenwitruimte grijskleur haalbuffer haalmarkering \ -haarlijn handhaafblokken hl hoofdtaal hoog huidigedatum \ -huidigekopnummer in inanderemarge inbinnen inbuiten indentation \ -inlijnd inlinker inmarge inrechter inregel inspringen \ -installeertaal interactiebalk interactiebuttons invullijnen invulregel invultekst \ -invulveld items its kantlijn kenmerk kenmerkdatum \ -kleur kloonveld kolom kop kopnummer koppeldocument \ -koppelmarkering koppelpagina koppelpapier koppelregister koptekst laag \ -labeling labels labeltekst laho legeregels letter \ -letters lijstsymbool loadsorts loadsynonyms maand mapfontsize \ -mar margetekst markeer markeerversie mediaeval naar \ -naarbox name nextsection nietinspringen nocap noot \ -nop omlaag omlijnd onderstreep onderstrepen ontkoppelmarkering \ -op opelkaar oppagina overstreep overstrepen pagina \ -paginareferentie paragraph part paslayoutaan passeerblokken passendveld \ -plaatsbeeldmerken plaatsbookmarks plaatsformule plaatslegenda plaatslijst plaatslokalevoetnoten \ -plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatsplaatsblok plaatsreferentielijst plaatsregister \ -plaatsregister plaatssamengesteldelijst plaatssamengesteldelijst plaatssubformule plaatsvoetnoten placelistoffloats \ -placelistofsorts placelistofsynonyms 
positioneer programma publicatie punten \ -ran ref referentie regellinks regelmidden regelrechts \ -register registreervelden reservefloat reset resetmarkering resettextcontent \ -romeins rooster roteer schaal scherm schrijfnaarlijst \ -schrijfnaarreferentielijst schrijfnaarregister schrijftussenlijst section seeregister selecteerblokken \ -selecteerpapier selecteerversie setupfonthandling setupfontsynonym setupinterlinespace2 setuplistalternative \ -som soortpagina sort spatie spiegel splitsplaatsblok \ -startachtergrond startalignment startbuffer startbuffer startcitaat startcolumns \ -startcombination startcommentaar startdescription startdocument startenumeration startfigure \ -startfloattext startformula startframedtext startgeg starthiding startinteractiemenu \ -startitemgroup startkantlijn startkleur startlegend startline startlinecorrection \ -startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup \ -startmarginblock startnamemakeup startnarrower startomgeving startonderdeel startopelkaar \ -startopposite startoverlay startoverview startparagraph startpositioning startpostponing \ -startprodukt startprofile startprojekt startregister startsymbolset startsynchronization \ -starttable starttables starttabulate starttekstlijn starttyping startunpacked \ -startversie stelachtergrondenin stelachtergrondin stelalineasin stelarrangerenin stelblankoin \ -stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin stelboventekstenin \ -stelbufferin stelbuttonsin stelciterenin stelclipin stelcombinatiesin stelcommentaarin \ -steldoordefinierenin steldoornummerenin steldoorspringenin steldunnelijnenin stelexternefigurenin stelformulesin \ -stelformulierenin stelhoofdin stelhoofdtekstenin stelinmargein stelinspringenin stelinteractiebalkin \ -stelinteractiein stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin \ -stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin stelkleurenin \ -stelkleurin stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin \ -stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin stellegendain \ -stellijndiktein stellijstin stelmargeblokkenin stelmarkeringin stelnaastplaatsenin stelnummerenin \ -stelomlijndin stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelpaginanummerin \ -stelpaginanummeringin stelpaginaovergangenin stelpaletin stelpapierformaatin stelpapierin stelparagraafnummerenin \ -stelplaatsblokin stelplaatsblokkenin stelplaatsbloksplitsenin stelpositionerenin stelprofielenin stelprogrammasin \ -stelpublicatiesin stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin stelregelsin \ -stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin stelsectiein stelsmallerin \ -stelsorterenin stelspatieringin stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin \ -stelsynchronisatiein stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin \ -steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin stelteksttekstenin steltolerantiein \ -steltypein steltypenin steluitlijnenin steluitvoerin stelurlin stelveldenin \ -stelveldin stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin \ -stelwitruimtein stopachtergrond stopalignment stopbuffer stopbuffer stopcitaat \ -stopcolumns stopcombination stopcommentaar stopdescription stopdocument stopenumeration \ -stopfigure stopfloattext stopformula 
stopframedtext stopgeg stophiding \ -stopinteractiemenu stopitemgroup stopkantlijn stopkleur stoplegend stopline \ -stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes \ -stopmakeup stopmarginblock stopnamemakeup stopnarrower stopomgeving stoponderdeel \ -stopopelkaar stopopposite stopoverlay stopoverview stopparagraph stoppositioning \ -stoppostponing stopprodukt stopprofile stopprojekt stopsymbolset stopsynchronization \ -stoptable stoptables stoptabulate stoptekstlijn stoptyping stopunpacked \ -stopversie sub subject subsection subsubject subsubsection \ -subsubsubject suggestie switchnaarkorps sym symbool synchronisatiebalk \ -synchroniseer synonym taal tab tekstlijn tekstreferentie \ -tex title toonexternefiguren toongrid tooninstellingen toonkader \ -toonkleur toonkleurgroep toonkorps toonkorpsomgeving toonlayout toonopmaak \ -toonpalet toonprint toonstruts toonsymboolset toonvelden typ \ -type typebuffer typefile uit uitgerekt usecodering \ -usedirectory vastespaties veld veldstapel verbergblokken vergelijkkleurgroep \ -vergelijkpalet verplaatsopgrid versie vertaal verwerkblokken verwerkpagina \ -vl voetnoot voetnoottekst volgprofiel volgprofielversie volgversie \ -volledigregister weekdag wiskunde witruimte woordrechts \ No newline at end of file +keywordclass.macros.context.nl=CAP Cap Caps Letter Letters MAAND Romeins WEEKDAG WOORD WOORDEN Woord Woorden about achtergrond appendix bepaalkopnummer bepaallijstkenmerken blanko blokje blokjes blokkeerinteractiemenu bookmark breuk but button cap chapter chem cijfers citaat citeer clip commentaar completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms converteernummer copieerveld corrigeerwitruimte coupledregister crlf datum definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling definetypeface definieer definieeralineas definieerbeeldmerk definieerblanko definieerblok definieerbuffer definieerconversie definieerfiguursymbool definieerfont definieerfontsynoniem definieerinteractiemenu definieerinteractiemenu definieerkadertekst definieerkleur definieerkleurgroep definieerkop definieerkorps definieerkorpsomgeving definieerlijst definieermarkering definieeromlijnd definieeropmaak definieeroverlay definieerpalet definieerpapierformaat definieerplaatsblok definieerprofiel definieerprogramma definieerreferentie definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst definieersectie definieersectieblok definieersorteren definieerstartstop definieersubveld definieersymbool definieersynoniemen definieertabelvorm definieertabulatie definieertekst definieertype definieertypen definieeruitvoer definieerveld definieerveldstapel definieerversie description doordefinieren doorlabelen doornummeren doorspringen dunnelijn dunnelijnen eenregel enumeration ergens externfiguur forceerblokken framedtext gebruikURL gebruikblokken gebruikcommandos gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur gebruikexterngeluidsfragment gebruikmodule gebruikreferenties gebruikspecials gebruiksymbolen gebruiktypescript gebruiktypescriptfile geenblokkenmeer geenbovenenonderregels geenfilesmeer geenhoofdenvoetregels geenlijst geenmarkering geenspatie geenwitruimte grijskleur haalbuffer haalmarkering haarlijn handhaafblokken hl hoofdtaal hoog huidigedatum huidigekopnummer in inanderemarge inbinnen inbuiten indentation inlijnd inlinker inmarge inrechter inregel inspringen installeertaal interactiebalk 
interactiebuttons invullijnen invulregel invultekst invulveld items its kantlijn kenmerk kenmerkdatum kleur kloonveld kolom kop kopnummer koppeldocument koppelmarkering koppelpagina koppelpapier koppelregister koptekst laag labeling labels labeltekst laho legeregels letter letters lijstsymbool loadsorts loadsynonyms maand mapfontsize mar margetekst markeer markeerversie mediaeval naar naarbox name nextsection nietinspringen nocap noot nop omlaag omlijnd ontkoppelmarkering op opelkaar oppagina overbar overbars overstrike overstrikes pagina paginareferentie paragraph part paslayoutaan passeerblokken passendveld plaatsbeeldmerken plaatsbookmarks plaatsformule plaatslegenda plaatslijst plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsregister plaatssamengesteldelijst plaatssamengesteldelijst plaatssubformule plaatsvoetnoten placelistoffloats placelistofsorts placelistofsynonyms positioneer programma publicatie punten ran ref referentie regellinks regelmidden regelrechts register registreervelden reservefloat reset resetmarkering resettextcontent romeins rooster roteer schaal scherm schrijfnaarlijst schrijfnaarreferentielijst schrijfnaarregister schrijftussenlijst section seeregister selecteerblokken selecteerpapier selecteerversie setupfonthandling setupfontsynonym setupinterlinespace2 setuplistalternative som soortpagina sort spatie spiegel splitsplaatsblok startachtergrond startalignment startbuffer startbuffer startcitaat startcolumns startcombination startcommentaar startdescription startdocument startenumeration startfigure startfloattext startformula startframedtext startgeg starthiding startinteractiemenu startitemgroup startkantlijn startkleur startlegend startline startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock startnamemakeup startnarrower startomgeving startonderdeel startopelkaar startopposite startoverlay startoverview startparagraph startpositioning startpostponing startprodukt startprofile startprojekt startregister startsymbolset startsynchronization starttable starttables starttabulate starttekstlijn starttyping startunpacked startversie stelachtergrondenin stelachtergrondin stelalineasin stelarrangerenin stelblankoin stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin stelboventekstenin stelbufferin stelbuttonsin stelciterenin stelclipin stelcombinatiesin stelcommentaarin steldoordefinierenin steldoornummerenin steldoorspringenin steldunnelijnenin stelexternefigurenin stelformulesin stelformulierenin stelhoofdin stelhoofdtekstenin stelinmargein stelinspringenin stelinteractiebalkin stelinteractiein stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin stelkleurenin stelkleurin stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin stellegendain stellijndiktein stellijstin stelmargeblokkenin stelmarkeringin stelnaastplaatsenin stelnummerenin stelomlijndin stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelpaginanummerin stelpaginanummeringin stelpaginaovergangenin stelpaletin stelpapierformaatin stelpapierin stelparagraafnummerenin stelplaatsblokin stelplaatsblokkenin stelplaatsbloksplitsenin stelpositionerenin stelprofielenin stelprogrammasin stelpublicatiesin 
stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin stelregelsin stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin stelsectiein stelsmallerin stelsorterenin stelspatieringin stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin stelsynchronisatiein stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin stelteksttekstenin steltolerantiein steltypein steltypenin steluitlijnenin steluitvoerin stelurlin stelveldenin stelveldin stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin stelwitruimtein stopachtergrond stopalignment stopbuffer stopbuffer stopcitaat stopcolumns stopcombination stopcommentaar stopdescription stopdocument stopenumeration stopfigure stopfloattext stopformula stopframedtext stopgeg stophiding stopinteractiemenu stopitemgroup stopkantlijn stopkleur stoplegend stopline stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopnamemakeup stopnarrower stopomgeving stoponderdeel stopopelkaar stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stopprodukt stopprofile stopprojekt stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptekstlijn stoptyping stopunpacked stopversie sub subject subsection subsubject subsubsection subsubsubject suggestie switchnaarkorps sym symbool synchronisatiebalk synchroniseer synonym taal tab tekstlijn tekstreferentie tex title toonexternefiguren toongrid tooninstellingen toonkader toonkleur toonkleurgroep toonkorps toonkorpsomgeving toonlayout toonopmaak toonpalet toonprint toonstruts toonsymboolset toonvelden typ type typebuffer typefile uit uitgerekt underbar underbars usecodering usedirectory vastespaties veld veldstapel verbergblokken vergelijkkleurgroep vergelijkpalet verplaatsopgrid versie vertaal verwerkblokken verwerkpagina vl voetnoot voetnoottekst volgprofiel volgprofielversie volgversie volledigregister weekdag wiskunde witruimte woordrechts \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/cont-pe-scite.properties b/Master/texmf-dist/context/data/scite/cont-pe-scite.properties index a9756b04c44..a1769572557 100644 --- a/Master/texmf-dist/context/data/scite/cont-pe-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-pe-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.pe=\ -CAP Cap Caps Character Characters MONTH \ -Romannumerals WEEKDAY WORD WORDS Word Words \ -appendix cap chapter chem completecombinedlist completelistoffloats \ -completelistofsorts completelistofsynonyms coupledregister crlf definebodyfontDEF definebodyfontREF \ -definedfont definefontfeature definefonthandling definetypeface description enumeration \ -framedtext indentation its labeling loadsorts loadsynonyms \ -mapfontsize mediaeval name nextsection nocap paragraph \ -part placelistoffloats placelistofsorts placelistofsynonyms ran register \ -reservefloat resettextcontent section seeregister setupcapitals setupfonthandling \ -setupfontsynonym setupinterlinespace2 setuplistalternative setupurl sort startalignment \ -startbuffer startbuffer startcolumns startcombination startdescription startdocument \ -startenumeration startfigure startfloattext startformula startframedtext starthiding \ -startitemgroup startlegend startline startlinecorrection startlinenumbering startlines \ -startlocal startlocalenvironment startlocalfootnotes 
startmakeup startmarginblock startnamemakeup \ -startnarrower startopposite startoverlay startoverview startparagraph startpositioning \ -startpostponing startprofile startregister startsymbolset startsynchronization starttable \ -starttables starttabulate starttyping startunpacked startتوضیح startتولید \ -startحقیقت startخط‌حاشیه startخط‌متن startرنگ startفشرده startمحیط \ -startمنوی‌پانل startمولفه startنسخه startنقل‌قول startپروژه startپس‌زمینه \ -stopalignment stopbuffer stopbuffer stopcolumns stopcombination stopdescription \ -stopdocument stopenumeration stopfigure stopfloattext stopformula stopframedtext \ -stophiding stopitemgroup stoplegend stopline stoplinecorrection stoplinenumbering \ -stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \ -stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph \ -stoppositioning stoppostponing stopprofile stopsymbolset stopsynchronization stoptable \ -stoptables stoptabulate stoptyping stopunpacked stopتوضیح stopتولید \ -stopحقیقت stopخط‌حاشیه stopخط‌متن stopرنگ stopفشرده stopمحیط \ -stopمنوی‌پانل stopمولفه stopنسخه stopنقل‌قول stopپروژه stopپس‌زمینه \ -sub subject subsection subsubject subsubsection subsubsubject \ -synonym title tooltip typ useURL usedirectory \ -آیتم آیتمها آینه اجباربلوکها از ازکارانداختن‌منوی‌پانل \ -استفاده‌بلوکها استفاده‌دستخط‌تایپ استفاده‌رمزینه استفاده‌شکل‌خارجی استفاده‌فرمانها استفاده‌قطعه‌موزیک‌خارجی \ -استفاده‌مدول استفاده‌مرجعها استفاده‌نمادها استفاده‌نوشتارخارجی استفاده‌ویژگیها استفاده‌پرونده‌خارجی \ -استفاده‌پرونده‌دستخط‌تایپ استفاده‌پرونده‌های‌خارجی اعدادلاتین اما انتخاب‌برگ انتخاب‌بلوکها \ -انتخاب‌نسخه انتقال‌به‌توری بارگذاری‌آرایش بارگذاری‌آیتمها بارگذاری‌ارجاع بارگذاری‌اندازه‌برگ \ -بارگذاری‌باریکتر بارگذاری‌بافر بارگذاری‌بالا بارگذاری‌بخش بارگذاری‌بردباری بارگذاری‌برنامه‌ها \ -بارگذاری‌برگ بارگذاری‌بست بارگذاری‌بلوک بارگذاری‌بلوکهای‌حاشیه بارگذاری‌بلوک‌بخش بارگذاری‌تایپ \ -بارگذاری‌تایپ‌کردن بارگذاری‌تب بارگذاری‌ترتیب بارگذاری‌ترتیب بارگذاری‌ترکیب‌ها بارگذاری‌تطابق \ -بارگذاری‌تعریف‌پانوشت بارگذاری‌تنظیم بارگذاری‌ته‌برگ بارگذاری‌تورفتگی بارگذاری‌تورفتگیها بارگذاری‌توضیح \ -بارگذاری‌ثبت بارگذاری‌جدولها بارگذاری‌جدول‌بندی بارگذاری‌خالی بارگذاری‌خروجی بارگذاری‌خطها \ -بارگذاری‌خطهای‌حاشیه بارگذاری‌خطهای‌سیاه بارگذاری‌خطهای‌متن بارگذاری‌خطها‌ی‌نازک بارگذاری‌درج‌درخطها بارگذاری‌درج‌مخالف \ -بارگذاری‌درون‌حاشیه بارگذاری‌دوران بارگذاری‌دکمه‌ها بارگذاری‌راهنما بارگذاری‌رنگ بارگذاری‌رنگها \ -بارگذاری‌زبان بارگذاری‌ستونها بارگذاری‌سر بارگذاری‌سربرگ بارگذاری‌سرها بارگذاری‌سیستم \ -بارگذاری‌شرح بارگذاری‌شرح بارگذاری‌شرحها بارگذاری‌شماره‌زیرصفحه بارگذاری‌شماره‌سر بارگذاری‌شماره‌صفحه \ -بارگذاری‌شماره‌گذاری بارگذاری‌شماره‌گذاریها بارگذاری‌شماره‌گذاری‌صفحه بارگذاری‌شماره‌گذاری‌پاراگراف بارگذاری‌شماره‌‌گذاری‌خط بارگذاری‌شناور \ -بارگذاری‌شناورها بارگذاری‌شکافتن‌شناورها بارگذاری‌شکلهای‌خارجی بارگذاری‌طرح بارگذاری‌طرح‌بندی بارگذاری‌عرض‌خط \ -بارگذاری‌فاصله‌بین‌خط بارگذاری‌فرمولها بارگذاری‌فضای‌سفید بارگذاری‌فضا‌گذاری بارگذاری‌قالبی بارگذاری‌قلم‌متن \ -بارگذاری‌لوح بارگذاری‌لیست بارگذاری‌لیست‌ترکیبی بارگذاری‌لیست‌مرجع بارگذاری‌مترادفها بارگذاری‌متن \ -بارگذاری‌متنهای‌بالا بارگذاری‌متن‌سر بارگذاری‌متن‌سربرگ بارگذاری‌متن‌قالبی بارگذاری‌متن‌متنها بارگذاری‌متن‌پانوشت \ -بارگذاری‌متن‌پایین بارگذاری‌مجموعه‌نماد بارگذاری‌محیط‌قلم‌متن بارگذاری‌منوی‌پانل بارگذاری‌مکان‌گذاری بارگذاری‌میدان \ -بارگذاری‌میدانها بارگذاری‌میله‌تطابق بارگذاری‌میله‌زیر بارگذاری‌میله‌پانل بارگذاری‌نسخه‌ها بارگذاری‌نشانه‌شکستن \ -بارگذاری‌نشانه‌گذاری بارگذاری‌نشرها بارگذاری‌نقل بارگذاری‌پاراگرافها 
بارگذاری‌پانل بارگذاری‌پانوشتها \ -بارگذاری‌پایین بارگذاری‌پرده‌ها بارگذاری‌پرده‌پانل بارگذاری‌پروفایلها بارگذاری‌پرکردن‌خطها بارگذاری‌پس‌زمینه \ -بارگذاری‌پس‌زمینه‌ها بارگذاری‌چیدن بارگذاری‌گذارصفحه بارگذاری‌گروه‌آیتم بازنشانی بازنشانی‌نشانه‌گذاری \ -باگذاری‌متن‌برچسب بدون‌بلوکهای‌بیشتر بدون‌تورفتگی بدون‌خط‌بالاوپایین بدون‌خط‌سروته‌برگ بدون‌فایلهای‌بیشتر \ -بدون‌فضا بدون‌فضای‌سفید بدون‌لیست بدون‌نشانه‌گذاری برنامه بروبه \ -بروبه‌جعبه بروپایین برچسبها بلند بلوکهای‌پردازش بلوکها‌پنهان \ -بنویس‌بین‌لیست بنویس‌درثبت بنویس‌درلیست‌مرجع بنویس‌در‌لیست تاریخ تاریخ‌جاری \ -تاریخ‌رجوع تایپ تایپ‌بافر تایپ‌پرونده تب ترجمه \ -تطابق تعریف تعریف‌آرایش تعریف‌آرم تعریف‌الگوی‌جدول تعریف‌اندازه‌برگ \ -تعریف‌بافر تعریف‌بخش تعریف‌برنامه تعریف‌برچسب تعریف‌بلوک تعریف‌بلوک‌بخش \ -تعریف‌تایپ تعریف‌تایپ‌کردن تعریف‌تبدیل تعریف‌ترتیب تعریف‌توده‌میدان تعریف‌تورفتگی \ -تعریف‌ثبت تعریف‌جدول‌بندی تعریف‌خالی تعریف‌خروجی تعریف‌رنگ تعریف‌زیرمیدان \ -تعریف‌سر تعریف‌شرح تعریف‌شروع‌پایان تعریف‌شماره‌بندی تعریف‌شمایل‌مرجع تعریف‌شناور \ -تعریف‌قالبی تعریف‌قلم تعریف‌قلم‌متن تعریف‌لوح تعریف‌لیست تعریف‌لیست‌ترکیبی \ -تعریف‌لیست‌مرجع تعریف‌مترادفها تعریف‌مترادف‌قلم تعریف‌متن تعریف‌متن‌قالبی تعریف‌محیط‌قلم‌بدنه \ -تعریف‌مرجع تعریف‌منوی‌پانل تعریف‌منوی‌پانل تعریف‌میدان تعریف‌نسخه تعریف‌نشانه‌گذاری \ -تعریف‌نماد تعریف‌نمادشکل تعریف‌پاراگرافها تعریف‌پروفایل تعریف‌پوشش تعریف‌گروه‌رنگ \ -تعیین‌شماره‌سر تعیین‌محتوای‌متن تعیین‌مشخصات‌لیست تغییربه‌قلم‌بدنه تنظیم‌راست تنظیم‌طرح‌بندی \ -تنظیم‌وسط تورفتگی توری توضیح تک ثبت‌زوج \ -ثبت‌کامل جداسازی‌نشانه‌گذاری حاش حرف حرفها حفظ‌بلوکها \ -خالی خطهای‌سیاه خطهای‌نازک خطها‌خالی خط‌حاشیه خط‌زدن \ -خط‌زدنها خط‌سیاه خط‌متن خط‌مو خط‌نازک خ‌ا \ -خ‌ع در درج‌آرمها درج‌ثبت درج‌ثبت درج‌درخط \ -درج‌درخطها درج‌درمتن درج‌درمیدان درج‌در‌بالای‌یکدیگر درج‌در‌توری درج‌راهنما \ -درج‌زیرفرمول درج‌شناور درج‌فرمول درج‌لیست درج‌لیست‌مختلط درج‌لیست‌مختلط \ -درج‌لیست‌مرجع درج‌پانوشتها درج‌پانوشتهای‌موضعی درج‌چوب‌خط درج‌کنار‌به‌کنار درحاشیه \ -درحاشیه‌دیگر درخارجی درخط درداخلی درراست درصفحه \ -درقالبی درمورد درون درچپ دریافت‌بافر دریافت‌نشانه \ -دوران دکمه دکمه‌پانل رج رجوع رنگ \ -رنگ‌خاکستری روزهفته ریاضی زبان زبان‌اصلی ستون \ -سر شماره‌سر شماره‌سرجاری شماره‌مبدل شماره‌ها شکافتن‌شناور \ -شکل‌خارجی صفحه صفحه‌زوج صفحه‌پردازش عبوربلوکها فشرده \ -فضا فضاهای‌ثابت فضای‌سفید فضای‌سفیدصحیح قالبی لوح‌مقایسه \ -ماه متن‌برچسب متن‌حاشیه متن‌سر متن‌پانوشت مرجع \ -مرجع‌صفحه مرجع‌متن مقایسه‌گروه‌رنگ مقیاس منفی مکان \ -میدان میدانهای‌گزارش میدان‌شبیه‌سازی میدان‌پشته میدان‌کپی میله‌تطابق \ -میله‌رو میله‌زیر میله‌ها‌رو میله‌پانل میله‌‌های‌زیر نسخه \ -نسخه‌نشانه نشانه‌گذاری نشانه‌گذاری‌زوج نشر نصب‌زبان نقطه‌ها \ -نقل نقل‌قول نم نماد نمادلیست نمایش‌آرایش \ -نمایش‌بارگذاریها نمایش‌بستها نمایش‌توری نمایش‌رنگ نمایش‌شکلهای‌خارجی نمایش‌طرح‌بندی \ -نمایش‌قالب نمایش‌قلم‌بدنه نمایش‌لوح نمایش‌مجموعه‌علامت نمایش‌محیط‌قلم‌بدنه نمایش‌میدانها \ -نمایش‌چاپ نمایش‌گروه‌رنگ نوشتارزوج نوع‌صفحه پابا پانوشت \ -پایین پرده پرکردن‌میدان پس‌زمینه پیروی‌نسخه پیروی‌نسخه‌پروفایل \ -پیروی‌پروفایل چوبخط چپ‌چین کاغذزوج کسر کشیده \ -کلمه‌راست گیره یادداشت یک‌جا یک‌خط \ No newline at end of file +keywordclass.macros.context.pe=CAP Cap Caps Character Characters MONTH Romannumerals WEEKDAY WORD WORDS Word Words appendix cap chapter chem completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms coupledregister crlf definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling definetypeface description enumeration framedtext indentation its labeling loadsorts loadsynonyms mapfontsize mediaeval name nextsection nocap overbar 
overbars overstrike overstrikes paragraph part placelistoffloats placelistofsorts placelistofsynonyms ran register reservefloat resettextcontent section seeregister setupcapitals setupfonthandling setupfontsynonym setupinterlinespace2 setuplistalternative setupurl sort startalignment startbuffer startbuffer startcolumns startcombination startdescription startdocument startenumeration startfigure startfloattext startformula startframedtext starthiding startitemgroup startlegend startline startlinecorrection startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph startpositioning startpostponing startprofile startregister startsymbolset startsynchronization starttable starttables starttabulate starttyping startunpacked startتوضیح startتولید startحقیقت startخط‌حاشیه startخط‌متن startرنگ startفشرده startمحیط startمنوی‌پانل startمولفه startنسخه startنقل‌قول startپروژه startپس‌زمینه stopalignment stopbuffer stopbuffer stopcolumns stopcombination stopdescription stopdocument stopenumeration stopfigure stopfloattext stopformula stopframedtext stophiding stopitemgroup stoplegend stopline stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stopprofile stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptyping stopunpacked stopتوضیح stopتولید stopحقیقت stopخط‌حاشیه stopخط‌متن stopرنگ stopفشرده stopمحیط stopمنوی‌پانل stopمولفه stopنسخه stopنقل‌قول stopپروژه stopپس‌زمینه sub subject subsection subsubject subsubsection subsubsubject synonym title tooltip typ underbar underbars useURL usedirectory آیتم آیتمها آینه اجباربلوکها از ازکارانداختن‌منوی‌پانل استفاده‌بلوکها استفاده‌دستخط‌تایپ استفاده‌رمزینه استفاده‌شکل‌خارجی استفاده‌فرمانها استفاده‌قطعه‌موزیک‌خارجی استفاده‌مدول استفاده‌مرجعها استفاده‌نمادها استفاده‌نوشتارخارجی استفاده‌ویژگیها استفاده‌پرونده‌خارجی استفاده‌پرونده‌دستخط‌تایپ استفاده‌پرونده‌های‌خارجی اعدادلاتین اما انتخاب‌برگ انتخاب‌بلوکها انتخاب‌نسخه انتقال‌به‌توری بارگذاری‌آرایش بارگذاری‌آیتمها بارگذاری‌ارجاع بارگذاری‌اندازه‌برگ بارگذاری‌باریکتر بارگذاری‌بافر بارگذاری‌بالا بارگذاری‌بخش بارگذاری‌بردباری بارگذاری‌برنامه‌ها بارگذاری‌برگ بارگذاری‌بست بارگذاری‌بلوک بارگذاری‌بلوکهای‌حاشیه بارگذاری‌بلوک‌بخش بارگذاری‌تایپ بارگذاری‌تایپ‌کردن بارگذاری‌تب بارگذاری‌ترتیب بارگذاری‌ترتیب بارگذاری‌ترکیب‌ها بارگذاری‌تطابق بارگذاری‌تعریف‌پانوشت بارگذاری‌تنظیم بارگذاری‌ته‌برگ بارگذاری‌تورفتگی بارگذاری‌تورفتگیها بارگذاری‌توضیح بارگذاری‌ثبت بارگذاری‌جدولها بارگذاری‌جدول‌بندی بارگذاری‌خالی بارگذاری‌خروجی بارگذاری‌خطها بارگذاری‌خطهای‌حاشیه بارگذاری‌خطهای‌سیاه بارگذاری‌خطهای‌متن بارگذاری‌خطها‌ی‌نازک بارگذاری‌درج‌درخطها بارگذاری‌درج‌مخالف بارگذاری‌درون‌حاشیه بارگذاری‌دوران بارگذاری‌دکمه‌ها بارگذاری‌راهنما بارگذاری‌رنگ بارگذاری‌رنگها بارگذاری‌زبان بارگذاری‌ستونها بارگذاری‌سر بارگذاری‌سربرگ بارگذاری‌سرها بارگذاری‌سیستم بارگذاری‌شرح بارگذاری‌شرح بارگذاری‌شرحها بارگذاری‌شماره‌زیرصفحه بارگذاری‌شماره‌سر بارگذاری‌شماره‌صفحه بارگذاری‌شماره‌گذاری بارگذاری‌شماره‌گذاریها بارگذاری‌شماره‌گذاری‌صفحه بارگذاری‌شماره‌گذاری‌پاراگراف بارگذاری‌شماره‌‌گذاری‌خط بارگذاری‌شناور بارگذاری‌شناورها بارگذاری‌شکافتن‌شناورها بارگذاری‌شکلهای‌خارجی بارگذاری‌طرح بارگذاری‌طرح‌بندی بارگذاری‌عرض‌خط بارگذاری‌فاصله‌بین‌خط بارگذاری‌فرمولها بارگذاری‌فضای‌سفید بارگذاری‌فضا‌گذاری بارگذاری‌قالبی بارگذاری‌قلم‌متن بارگذاری‌لوح 
بارگذاری‌لیست بارگذاری‌لیست‌ترکیبی بارگذاری‌لیست‌مرجع بارگذاری‌مترادفها بارگذاری‌متن بارگذاری‌متنهای‌بالا بارگذاری‌متن‌سر بارگذاری‌متن‌سربرگ بارگذاری‌متن‌قالبی بارگذاری‌متن‌متنها بارگذاری‌متن‌پانوشت بارگذاری‌متن‌پایین بارگذاری‌مجموعه‌نماد بارگذاری‌محیط‌قلم‌متن بارگذاری‌منوی‌پانل بارگذاری‌مکان‌گذاری بارگذاری‌میدان بارگذاری‌میدانها بارگذاری‌میله‌تطابق بارگذاری‌میله‌زیر بارگذاری‌میله‌پانل بارگذاری‌نسخه‌ها بارگذاری‌نشانه‌شکستن بارگذاری‌نشانه‌گذاری بارگذاری‌نشرها بارگذاری‌نقل بارگذاری‌پاراگرافها بارگذاری‌پانل بارگذاری‌پانوشتها بارگذاری‌پایین بارگذاری‌پرده‌ها بارگذاری‌پرده‌پانل بارگذاری‌پروفایلها بارگذاری‌پرکردن‌خطها بارگذاری‌پس‌زمینه بارگذاری‌پس‌زمینه‌ها بارگذاری‌چیدن بارگذاری‌گذارصفحه بارگذاری‌گروه‌آیتم بازنشانی بازنشانی‌نشانه‌گذاری باگذاری‌متن‌برچسب بدون‌بلوکهای‌بیشتر بدون‌تورفتگی بدون‌خط‌بالاوپایین بدون‌خط‌سروته‌برگ بدون‌فایلهای‌بیشتر بدون‌فضا بدون‌فضای‌سفید بدون‌لیست بدون‌نشانه‌گذاری برنامه بروبه بروبه‌جعبه بروپایین برچسبها بلند بلوکهای‌پردازش بلوکها‌پنهان بنویس‌بین‌لیست بنویس‌درثبت بنویس‌درلیست‌مرجع بنویس‌در‌لیست تاریخ تاریخ‌جاری تاریخ‌رجوع تایپ تایپ‌بافر تایپ‌پرونده تب ترجمه تطابق تعریف تعریف‌آرایش تعریف‌آرم تعریف‌الگوی‌جدول تعریف‌اندازه‌برگ تعریف‌بافر تعریف‌بخش تعریف‌برنامه تعریف‌برچسب تعریف‌بلوک تعریف‌بلوک‌بخش تعریف‌تایپ تعریف‌تایپ‌کردن تعریف‌تبدیل تعریف‌ترتیب تعریف‌توده‌میدان تعریف‌تورفتگی تعریف‌ثبت تعریف‌جدول‌بندی تعریف‌خالی تعریف‌خروجی تعریف‌رنگ تعریف‌زیرمیدان تعریف‌سر تعریف‌شرح تعریف‌شروع‌پایان تعریف‌شماره‌بندی تعریف‌شمایل‌مرجع تعریف‌شناور تعریف‌قالبی تعریف‌قلم تعریف‌قلم‌متن تعریف‌لوح تعریف‌لیست تعریف‌لیست‌ترکیبی تعریف‌لیست‌مرجع تعریف‌مترادفها تعریف‌مترادف‌قلم تعریف‌متن تعریف‌متن‌قالبی تعریف‌محیط‌قلم‌بدنه تعریف‌مرجع تعریف‌منوی‌پانل تعریف‌منوی‌پانل تعریف‌میدان تعریف‌نسخه تعریف‌نشانه‌گذاری تعریف‌نماد تعریف‌نمادشکل تعریف‌پاراگرافها تعریف‌پروفایل تعریف‌پوشش تعریف‌گروه‌رنگ تعیین‌شماره‌سر تعیین‌محتوای‌متن تعیین‌مشخصات‌لیست تغییربه‌قلم‌بدنه تنظیم‌راست تنظیم‌طرح‌بندی تنظیم‌وسط تورفتگی توری توضیح تک ثبت‌زوج ثبت‌کامل جداسازی‌نشانه‌گذاری حاش حرف حرفها حفظ‌بلوکها خالی خطهای‌سیاه خطهای‌نازک خطها‌خالی خط‌حاشیه خط‌سیاه خط‌متن خط‌مو خط‌نازک خ‌ا خ‌ع در درج‌آرمها درج‌ثبت درج‌ثبت درج‌درخط درج‌درخطها درج‌درمتن درج‌درمیدان درج‌در‌بالای‌یکدیگر درج‌در‌توری درج‌راهنما درج‌زیرفرمول درج‌شناور درج‌فرمول درج‌لیست درج‌لیست‌مختلط درج‌لیست‌مختلط درج‌لیست‌مرجع درج‌پانوشتها درج‌پانوشتهای‌موضعی درج‌چوب‌خط درج‌کنار‌به‌کنار درحاشیه درحاشیه‌دیگر درخارجی درخط درداخلی درراست درصفحه درقالبی درمورد درون درچپ دریافت‌بافر دریافت‌نشانه دوران دکمه دکمه‌پانل رج رجوع رنگ رنگ‌خاکستری روزهفته ریاضی زبان زبان‌اصلی ستون سر شماره‌سر شماره‌سرجاری شماره‌مبدل شماره‌ها شکافتن‌شناور شکل‌خارجی صفحه صفحه‌زوج صفحه‌پردازش عبوربلوکها فشرده فضا فضاهای‌ثابت فضای‌سفید فضای‌سفیدصحیح قالبی لوح‌مقایسه ماه متن‌برچسب متن‌حاشیه متن‌سر متن‌پانوشت مرجع مرجع‌صفحه مرجع‌متن مقایسه‌گروه‌رنگ مقیاس منفی مکان میدان میدانهای‌گزارش میدان‌شبیه‌سازی میدان‌پشته میدان‌کپی میله‌تطابق میله‌پانل نسخه نسخه‌نشانه نشانه‌گذاری نشانه‌گذاری‌زوج نشر نصب‌زبان نقطه‌ها نقل نقل‌قول نم نماد نمادلیست نمایش‌آرایش نمایش‌بارگذاریها نمایش‌بستها نمایش‌توری نمایش‌رنگ نمایش‌شکلهای‌خارجی نمایش‌طرح‌بندی نمایش‌قالب نمایش‌قلم‌بدنه نمایش‌لوح نمایش‌مجموعه‌علامت نمایش‌محیط‌قلم‌بدنه نمایش‌میدانها نمایش‌چاپ نمایش‌گروه‌رنگ نوشتارزوج نوع‌صفحه پابا پانوشت پایین پرده پرکردن‌میدان پس‌زمینه پیروی‌نسخه پیروی‌نسخه‌پروفایل پیروی‌پروفایل چوبخط چپ‌چین کاغذزوج کسر کشیده کلمه‌راست گیره یادداشت یک‌جا یک‌خط \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/cont-ro-scite.properties b/Master/texmf-dist/context/data/scite/cont-ro-scite.properties index da9af1b3ae1..47fd2b9c3ce 100644 --- 
a/Master/texmf-dist/context/data/scite/cont-ro-scite.properties +++ b/Master/texmf-dist/context/data/scite/cont-ro-scite.properties @@ -1,97 +1 @@ -keywordclass.macros.context.ro=\ -CAP CUVANT CUVINTE Cap Caps Cuvant \ -Cuvinte LUNA Litera Litere Numereromane ZIDINSAPTAMANA \ -adapteazaaspect adubuffer adumarcaje afiseazaaspect afiseazacampuri afiseazaculoare \ -afiseazafiguriexterne afiseazafonttext afiseazagrid afiseazagrupculoare afiseazamakeup afiseazamediufonttext \ -afiseazapaleta afiseazarama afiseazasetari afiseazasetsimboluri afiseazastruts afiseazatiparire \ -aliniat aliniatcentru aliniatdreapta aliniatstanga appendix ascundeblocuri \ -barainteractiune barasincronizare barasus blanc but butoaneinteractiune \ -buton camp campumplere cap chapter chem \ -citat clip cloneazacamp coloana comentariu comparagrupculoare \ -comparapaleta completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister \ -convertestenumar copiazacamp corecteazaspatiualb coupledregister crlf culoare \ -culoaregri cupleazadocument cupleazamarcaje cupleazaregistru cuvantdreapta data \ -datacurenta datareferit decupleazamarcaje definebodyfontDEF definebodyfontREF definedfont \ -definefontfeature definefonthandling defineste definestealiat definesteantet definesteblanc \ -definestebloc definesteblocsectiune definestebuffer definestecamp definesteconversie definesteculoare \ -definestedescriere definestedimensiunehartie definesteenumerare definesteeticheta definestefloat definestefont \ -definestefonttext definesteformatreferinte definestegrupculori definesteinconjurare definestelista definestelistacombinata \ -definestelistareferinte definestelogo definestemakeup definestemarcaje definestemediulfonttext definestemeniuinteractiune \ -definestemeniuinteractiune definesteoutput definesteoverlay definestepaleta definesteparagraf definesteprofil \ -definesteprogram definestereferinte definesteregistru definestesablontabel definestesectiune definestesimbol \ -definestesimbolfigura definestesinonim definestesinonimfont definestesortare definestestartstop definestestivacampuri \ -definestesubcamp definestetabulatori definestetext definestetexteinconjurate definestetextinconjurat definestetyping \ -definesteversiune definetype definetypeface description despre determinacaracteristicilelistei \ -determinanumartitlu dezactiveazameniuinteractiune din dute dutebox ecran \ -el element element emptylines enumeration etichete \ -faraaliniat farafisiere faraliniiantetsisubsol faraliniisussijos faralista faramarcaje \ -faraspatiu faraspatiualb figuraexterna firdepar folosesteURL folosestebloc \ -folosestecodificarea folosestecomenzi folosestedocumentextern folosestefiguraexterna folosestefisiereexterne folosestefisierextern \ -folosestemodul folosestemuzicaexterna folosestereferinte folosestesimboluri folosestespeciale footnotetext \ -forteazablocuri fractie framed framedtext fundal gatablocuri \ -grid hartiedubla hl impachetat impartefloat in \ -inalt inaltamargine indentation indreapta inframed ininner \ -injos inlinie inouter instalarelimba instanga intins \ -jos jossus la labeling lapagina limba \ -limbaprincipala liniemargine linieneagra liniepeste liniesubtire linieumplere \ -liniinegre liniipeste liniisubtiri listsymbol litera litere \ -loadsorts loadsynonyms logcampuri luna mapfontsize mar \ -marcaje marcheazaversiune marginal matematica mediaeval minicitat \ -mutapegrid name nextsection nocap nop nota \ -notasubsol numartitlu numartitlucurent numere numereromane olinie \ -pagina 
paginadubla paragraph part pastreazablocuri placefloat \ -placelistoffloats placelistofsorts placelistofsynonyms placereferencelist plaseazapegrid plaseazasemnecarte \ -potrivestecamp pozitie proceseazabloc proceseazapagina program publicatie \ -puncte punedeasuprafiecareia punefatainfata puneformula punelegenda punelista \ -punelistacombinata punelistacombinata punelogouri punenotesubsol punenotesubsollocale puneregistru \ -puneregistru punesubformula ran ref referinta referintapagina \ -referintatext referit reflexie register reservefloat reset \ -reseteazamarcaje resettextcontent riglatext rigleumplere roteste saripesteblocuri \ -scala scriebuffer scrieinlista scrieinlistareferinte scrieinregistru scrieintreliste \ -section seeregister selecteazablocuri selecteazahartie selecteazaversiune semncarte \ -setarebarasincronizare setarelimba setareoutput setarepozitie setaresincronizare seteazaaliniat \ -seteazaaliniate seteazaalinierea seteazaantet seteazaaranjareapag seteazaaspect seteazabarainteractiune \ -seteazablanc seteazabloc seteazablocsectiune seteazablocurimarginale seteazabuffer seteazabutoane \ -seteazacamp seteazacampuri seteazaclipping seteazacoloane seteazacombinari seteazacomentariu \ -seteazaculoare seteazaculori seteazadefinireanotasubsol seteazadescriere seteazadimensiunihartie seteazaecrane \ -seteazaecraninteractiune seteazaelemente seteazaenumerare seteazafiguriexterne seteazafloat seteazafloats \ -seteazafonttext seteazaformulare seteazaformule seteazafundal seteazafundaluri seteazagrosimelinie \ -seteazaimpartireafloat seteazainconjurat seteazaingust seteazainteractiunea seteazajos seteazalegenda \ -seteazalegenda seteazalegendele seteazaliniesilabe seteazaliniesubtire seteazalinii seteazaliniimargine \ -seteazaliniinegre seteazaliniiumplere seteazalista seteazalistacombinata seteazalistareferinte seteazamajuscule \ -seteazamakeup seteazamarcaje seteazamarginal seteazamediulfonttext seteazameniuinteractiune seteazaminicitat \ -seteazanotasubsol seteazanumarpagina seteazanumarsubpagina seteazanumartitlu seteazanumerotare seteazanumerotarelinii \ -seteazanumerotarepagina seteazanumerotareparagrafe seteazapaleta seteazaparagrafe seteazaplasareaopozita seteazaprofile \ -seteazaprograme seteazapublicatii seteazareferinte seteazaregistru seteazarigletext seteazarigleumplere \ -seteazarotare seteazasectiune seteazasimbol seteazasinonime seteazasistem seteazasortare \ -seteazaspatiu seteazaspatiualb seteazaspatiuinterliniar seteazastrut seteazasublinie seteazasubsol \ -seteazasus seteazatab seteazatabele seteazatabulatori seteazatext seteazatexteantet \ -seteazatextejos seteazatextesubsol seteazatextesus seteazatextetext seteazatexteticheta seteazatexttitlu \ -seteazatitlu seteazatitluri seteazatoleranta seteazatranzitiepagina seteazatype seteazatyping \ -seteazaurl seteazaversiuni settextcontent setupfonthandling setupfontsynonym setupinterlinespace2 \ -setupitemgroup setuplistalternative setuppaper sim simbol sincronizeaza \ -sort spatiifixate spatiu spatiualb startalignment startbuffer \ -startbuffer startcitat startcolumns startcombination startcomentariu startcomponenta \ -startculoare startdescription startdocument startenumeration startfact startfigure \ -startfloattext startformula startframedtext startfundal starthiding startimpachetat \ -startitemgroup startlegend startline startlinecorrection startlinenumbering startlines \ -startliniemargine startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \ -startmediu startmeniuinteractiune 
startnamemakeup startnarrower startopposite startoverlay \ -startoverview startparagraph startpositioning startpostponing startprodus startprofile \ -startproiect startregister startriglatext startsymbolset startsynchronization starttable \ -starttables starttabulate starttyping startunpacked startversiune stivacampuri \ -stopalignment stopbuffer stopbuffer stopcitat stopcolumns stopcombination \ -stopcomentariu stopcomponenta stopculoare stopdescription stopdocument stopenumeration \ -stopfact stopfigure stopfloattext stopformula stopframedtext stopfundal \ -stophiding stopimpachetat stopitemgroup stoplegend stopline stoplinecorrection \ -stoplinenumbering stoplines stopliniemargine stoplocal stoplocalenvironment stoplocalfootnotes \ -stopmakeup stopmarginblock stopmediu stopmeniuinteractiune stopnamemakeup stopnarrower \ -stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \ -stopprodus stopprofile stopproiect stopriglatext stopsymbolset stopsynchronization \ -stoptable stoptables stoptabulate stoptyping stopunpacked stopversiune \ -sub subject sublinie sublinii subsection subsubject \ -subsubsection subsubsubject supralinie synonym tab tex \ -texteticheta textmarginal texttitlu textumplere tippagina title \ -titlu tooltip traduce trecilafonttext typ type \ -typefile undeva urmeazaprofil urmeazaversiune urmeazaversiuneprofil usedirectory \ -usetypescript usetypescriptfile versiune vl zidinsaptamana \ No newline at end of file +keywordclass.macros.context.ro=CAP CUVANT CUVINTE Cap Caps Cuvant Cuvinte LUNA Litera Litere Numereromane ZIDINSAPTAMANA adapteazaaspect adubuffer adumarcaje afiseazaaspect afiseazacampuri afiseazaculoare afiseazafiguriexterne afiseazafonttext afiseazagrid afiseazagrupculoare afiseazamakeup afiseazamediufonttext afiseazapaleta afiseazarama afiseazasetari afiseazasetsimboluri afiseazastruts afiseazatiparire aliniat aliniatcentru aliniatdreapta aliniatstanga appendix ascundeblocuri barainteractiune barasincronizare blanc but butoaneinteractiune buton camp campumplere cap chapter chem citat clip cloneazacamp coloana comentariu comparagrupculoare comparapaleta completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister convertestenumar copiazacamp corecteazaspatiualb coupledregister crlf culoare culoaregri cupleazadocument cupleazamarcaje cupleazaregistru cuvantdreapta data datacurenta datareferit decupleazamarcaje definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling defineste definestealiat definesteantet definesteblanc definestebloc definesteblocsectiune definestebuffer definestecamp definesteconversie definesteculoare definestedescriere definestedimensiunehartie definesteenumerare definesteeticheta definestefloat definestefont definestefonttext definesteformatreferinte definestegrupculori definesteinconjurare definestelista definestelistacombinata definestelistareferinte definestelogo definestemakeup definestemarcaje definestemediulfonttext definestemeniuinteractiune definestemeniuinteractiune definesteoutput definesteoverlay definestepaleta definesteparagraf definesteprofil definesteprogram definestereferinte definesteregistru definestesablontabel definestesectiune definestesimbol definestesimbolfigura definestesinonim definestesinonimfont definestesortare definestestartstop definestestivacampuri definestesubcamp definestetabulatori definestetext definestetexteinconjurate definestetextinconjurat definestetyping definesteversiune definetype definetypeface 
description despre determinacaracteristicilelistei determinanumartitlu dezactiveazameniuinteractiune din dute dutebox ecran el element element emptylines enumeration etichete faraaliniat farafisiere faraliniiantetsisubsol faraliniisussijos faralista faramarcaje faraspatiu faraspatiualb figuraexterna firdepar folosesteURL folosestebloc folosestecodificarea folosestecomenzi folosestedocumentextern folosestefiguraexterna folosestefisiereexterne folosestefisierextern folosestemodul folosestemuzicaexterna folosestereferinte folosestesimboluri folosestespeciale footnotetext forteazablocuri fractie framed framedtext fundal gatablocuri grid hartiedubla hl impachetat impartefloat in inalt inaltamargine indentation indreapta inframed ininner injos inlinie inouter instalarelimba instanga intins jos jossus la labeling lapagina limba limbaprincipala liniemargine linieneagra liniesubtire linieumplere liniinegre liniisubtiri listsymbol litera litere loadsorts loadsynonyms logcampuri luna mapfontsize mar marcaje marcheazaversiune marginal matematica mediaeval minicitat mutapegrid name nextsection nocap nop nota notasubsol numartitlu numartitlucurent numere numereromane olinie overbar overbars overstrike overstrikes pagina paginadubla paragraph part pastreazablocuri placefloat placelistoffloats placelistofsorts placelistofsynonyms placereferencelist plaseazapegrid plaseazasemnecarte potrivestecamp pozitie proceseazabloc proceseazapagina program publicatie puncte punedeasuprafiecareia punefatainfata puneformula punelegenda punelista punelistacombinata punelistacombinata punelogouri punenotesubsol punenotesubsollocale puneregistru puneregistru punesubformula ran ref referinta referintapagina referintatext referit reflexie register reservefloat reset reseteazamarcaje resettextcontent riglatext rigleumplere roteste saripesteblocuri scala scriebuffer scrieinlista scrieinlistareferinte scrieinregistru scrieintreliste section seeregister selecteazablocuri selecteazahartie selecteazaversiune semncarte setarebarasincronizare setarelimba setareoutput setarepozitie setaresincronizare seteazaaliniat seteazaaliniate seteazaalinierea seteazaantet seteazaaranjareapag seteazaaspect seteazabarainteractiune seteazablanc seteazabloc seteazablocsectiune seteazablocurimarginale seteazabuffer seteazabutoane seteazacamp seteazacampuri seteazaclipping seteazacoloane seteazacombinari seteazacomentariu seteazaculoare seteazaculori seteazadefinireanotasubsol seteazadescriere seteazadimensiunihartie seteazaecrane seteazaecraninteractiune seteazaelemente seteazaenumerare seteazafiguriexterne seteazafloat seteazafloats seteazafonttext seteazaformulare seteazaformule seteazafundal seteazafundaluri seteazagrosimelinie seteazaimpartireafloat seteazainconjurat seteazaingust seteazainteractiunea seteazajos seteazalegenda seteazalegenda seteazalegendele seteazaliniesilabe seteazaliniesubtire seteazalinii seteazaliniimargine seteazaliniinegre seteazaliniiumplere seteazalista seteazalistacombinata seteazalistareferinte seteazamajuscule seteazamakeup seteazamarcaje seteazamarginal seteazamediulfonttext seteazameniuinteractiune seteazaminicitat seteazanotasubsol seteazanumarpagina seteazanumarsubpagina seteazanumartitlu seteazanumerotare seteazanumerotarelinii seteazanumerotarepagina seteazanumerotareparagrafe seteazapaleta seteazaparagrafe seteazaplasareaopozita seteazaprofile seteazaprograme seteazapublicatii seteazareferinte seteazaregistru seteazarigletext seteazarigleumplere seteazarotare seteazasectiune seteazasimbol seteazasinonime 
seteazasistem seteazasortare seteazaspatiu seteazaspatiualb seteazaspatiuinterliniar seteazastrut seteazasublinie seteazasubsol seteazasus seteazatab seteazatabele seteazatabulatori seteazatext seteazatexteantet seteazatextejos seteazatextesubsol seteazatextesus seteazatextetext seteazatexteticheta seteazatexttitlu seteazatitlu seteazatitluri seteazatoleranta seteazatranzitiepagina seteazatype seteazatyping seteazaurl seteazaversiuni settextcontent setupfonthandling setupfontsynonym setupinterlinespace2 setupitemgroup setuplistalternative setuppaper sim simbol sincronizeaza sort spatiifixate spatiu spatiualb startalignment startbuffer startbuffer startcitat startcolumns startcombination startcomentariu startcomponenta startculoare startdescription startdocument startenumeration startfact startfigure startfloattext startformula startframedtext startfundal starthiding startimpachetat startitemgroup startlegend startline startlinecorrection startlinenumbering startlines startliniemargine startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock startmediu startmeniuinteractiune startnamemakeup startnarrower startopposite startoverlay startoverview startparagraph startpositioning startpostponing startprodus startprofile startproiect startregister startriglatext startsymbolset startsynchronization starttable starttables starttabulate starttyping startunpacked startversiune stivacampuri stopalignment stopbuffer stopbuffer stopcitat stopcolumns stopcombination stopcomentariu stopcomponenta stopculoare stopdescription stopdocument stopenumeration stopfact stopfigure stopfloattext stopformula stopframedtext stopfundal stophiding stopimpachetat stopitemgroup stoplegend stopline stoplinecorrection stoplinenumbering stoplines stopliniemargine stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmediu stopmeniuinteractiune stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview stopparagraph stoppositioning stoppostponing stopprodus stopprofile stopproiect stopriglatext stopsymbolset stopsynchronization stoptable stoptables stoptabulate stoptyping stopunpacked stopversiune sub subject subsection subsubject subsubsection subsubsubject synonym tab tex texteticheta textmarginal texttitlu textumplere tippagina title titlu tooltip traduce trecilafonttext typ type typefile underbar underbars undeva urmeazaprofil urmeazaversiune urmeazaversiuneprofil usedirectory usetypescript usetypescriptfile versiune vl zidinsaptamana \ No newline at end of file diff --git a/Master/texmf-dist/context/data/scite/context.properties b/Master/texmf-dist/context/data/scite/context.properties index cd1e8b36723..13fa079c02e 100644 --- a/Master/texmf-dist/context/data/scite/context.properties +++ b/Master/texmf-dist/context/data/scite/context.properties @@ -27,6 +27,14 @@ # if PLAT_WIN # find.command=fgrep -G -n $(find.what) $(find.files) +# Just UTF-8 + +code.page=65001 +output.code.page=65001 + +textwrapper.margin=4 +textwrapper.length=68 + # ConTeXt: suffixes (really needed) file.patterns.tex= @@ -47,12 +55,22 @@ import latex-scite # Example : patterns file.patterns.xml= -file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx +file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.ctx filter.example=eXaMpLe|$(file.patterns.example)| lexer.$(file.patterns.example)=xml +# Lua : patterns + +file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj + +filter.lua=Lua 
MkIV|$(file.patterns.lua)| +lexer.$(file.patterns.lua)=lua + +command.compile.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)" +command.go.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)" + # Commands: some scripts if PLAT_WIN @@ -76,7 +94,7 @@ name.metafun.console=$(name.context.console) name.example.console=$(name.context.console) if PLAT_WIN - name.context.mtxrun=mtxrun.cmd --autogenerate + name.context.mtxrun=mtxrun --autogenerate if PLAT_GTK name.context.mtxrun=mtxrun --autogenerate @@ -94,13 +112,10 @@ name.example.xmlcheck=tidy -quiet -utf8 -xml -errors name.metafun.mptopdf=$(name.context.mtxrun) --script mptopdf -# wwwserver --start --port=8061 --url=http://localhost:8061 --forcetemp --direct -# wwwserver.rb --direct - # if needed one can set MTX_SERVER_ROOT to the root of the documentation if PLAT_WIN - name.context.wwwserver=cmd /c start /min "Context Documentation" $(name.context.mtxrun) --script server --start + name.context.wwwserver=cmd /c start /min "Context Documentation" $(name.context.mtxrun) --script server --auto if PLAT_GTK name.context.wwwserver=$(name.context.mtxrun) --script server --start > ~/context-wwwserver.log & diff --git a/Master/texmf-dist/context/data/scite/scite-ctx.lua b/Master/texmf-dist/context/data/scite/scite-ctx.lua new file mode 100644 index 00000000000..8e6b6ebab74 --- /dev/null +++ b/Master/texmf-dist/context/data/scite/scite-ctx.lua @@ -0,0 +1,838 @@ +-- version : 1.0.0 - 07/2005 (2008: lua 5.1) +-- author : Hans Hagen - PRAGMA ADE - www.pragma-ade.com +-- copyright : public domain or whatever suits +-- remark : part of the context distribution, my first lua code + +-- todo: name space for local functions + +-- loading: scite-ctx.properties + +-- # environment variable +-- # +-- # CTXSPELLPATH=t:/spell +-- # +-- # auto language detection +-- # +-- # % version =1.0 language=uk +-- # + +-- ext.lua.startup.script=$(SciteDefaultHome)/scite-ctx.lua +-- +-- # extension.$(file.patterns.context)=scite-ctx.lua +-- # extension.$(file.patterns.example)=scite-ctx.lua +-- +-- # ext.lua.reset=1 +-- # ext.lua.auto.reload=1 +-- # ext.lua.startup.script=t:/lua/scite-ctx.lua +-- +-- ctx.menulist.default=\ +-- wrap=wrap_text|\ +-- unwrap=unwrap_text|\ +-- sort=sort_text|\ +-- document=document_text|\ +-- quote=quote_text|\ +-- compound=compound_text|\ +-- check=check_text +-- +-- ctx.spellcheck.language=auto +-- ctx.spellcheck.wordsize=4 +-- ctx.spellcheck.wordpath=ENV(CTXSPELLPATH) +-- +-- ctx.spellcheck.wordfile.all=spell-uk.txt,spell-nl.txt +-- +-- ctx.spellcheck.wordfile.uk=spell-uk.txt +-- ctx.spellcheck.wordfile.nl=spell-nl.txt +-- ctx.spellcheck.wordsize.uk=4 +-- ctx.spellcheck.wordsize.nl=4 +-- +-- command.name.21.*=CTX Action List +-- command.subsystem.21.*=3 +-- command.21.*=show_menu $(ctx.menulist.default) +-- command.groupundo.21.*=yes +-- command.shortcut.21.*=Shift+F11 +-- +-- command.name.22.*=CTX Check Text +-- command.subsystem.22.*=3 +-- command.22.*=check_text +-- command.groupundo.22.*=yes +-- command.shortcut.22.*=Ctrl+L +-- +-- command.name.23.*=CTX Wrap Text +-- command.subsystem.23.*=3 +-- command.23.*=wrap_text +-- command.groupundo.23.*=yes +-- command.shortcut.23.*=Ctrl+M +-- +-- # command.21.*=check_text +-- # command.21.*=dofile e:\context\lua\scite-ctx.lua + +-- generic functions + +props = props or { } -- setmetatable(props,{ __index = function(k,v) props[k] = "unknown" return "unknown" end } ) + +local byte, lower, upper, gsub, sub, find, rep, match, gmatch = string.byte, string.lower, string.upper, string.gsub, 
string.sub, string.find, string.rep, string.match, string.gmatch +local sort, concat = table.sort, table.concat + +local crlf = "\n" + +function traceln(str) + trace(str .. crlf) + io.flush() +end + +function string:grab(delimiter) + local list = {} + for snippet in self:gmatch(delimiter) do + list[#list+1] = snippet + end + return list +end + +function string:expand() + return (self:gsub("ENV%((%w+)%)", os.envvar)) +end + +function string:strip() + return (self:gsub("^%s*(.-)%s*$", "%1")) +end + +function table.alphasort(list,i) + if i and i > 0 then + local function alphacmp(a,b) + return lower(gsub(sub(a,i),'0',' ')) < lower(gsub(sub(b,i),'0',' ')) + end + sort(list,alphacmp) + else + local function alphacmp(a,b) + return lower(a) < lower(b) + end + sort(list,alphacmp) + end +end + +function io.exists(filename) + local ok, result, message = pcall(io.open,filename) + if result then + io.close(result) + return true + else + return false + end +end + +function os.envvar(str) + local s = os.getenv(str) + if s ~= '' then + return s + end + s = os.getenv(upper(str)) + if s ~= '' then + return s + end + s = os.getenv(lower(str)) + if s ~= '' then + return s + end +end + +-- support functions, maybe editor namespace + +-- function column_of_position(position) +-- local line = editor:LineFromPosition(position) +-- local oldposition = editor.CurrentPos +-- local column = 0 +-- editor:GotoPos(position) +-- while editor.CurrentPos ~= 0 and line == editor:LineFromPosition(editor.CurrentPos) do +-- editor:CharLeft() +-- column = column + 1 +-- end +-- editor:GotoPos(oldposition) +-- if line > 0 then +-- return column -1 +-- else +-- return column +-- end +-- end + +-- function line_of_position(position) +-- return editor:LineFromPosition(position) +-- end + +function extend_to_start() + local selectionstart = editor.SelectionStart + local selectionend = editor.SelectionEnd + local line = editor:LineFromPosition(selectionstart) + if line > 0 then + while line == editor:LineFromPosition(selectionstart-1) do + selectionstart = selectionstart - 1 + editor:SetSel(selectionstart,selectionend) + end + else + selectionstart = 0 + end + editor:SetSel(selectionstart,selectionend) + return selectionstart +end + +function extend_to_end() -- editor:LineEndExtend() does not work + local selectionstart = editor.SelectionStart + local selectionend = editor.SelectionEnd + local line = editor:LineFromPosition(selectionend) + while line == editor:LineFromPosition(selectionend+1) do + selectionend = selectionend + 1 + editor:SetSel(selectionstart,selectionend) + end + editor:SetSel(selectionstart,selectionend) + return selectionend +end + +function getfiletype() + local firstline = editor:GetLine(0) + if editor.Lexer == SCLEX_TEX then + return 'tex' + elseif editor.Lexer == SCLEX_XML then + return 'xml' + elseif find(firstline,"^%%") then + return 'tex' + elseif find(firstline,"^<%?xml") then + return 'xml' + else + return 'unknown' + end +end + +-- inspired by LuaExt's scite_Files + +function get_dir_list(mask) + local f + if props['PLAT_GTK'] and props['PLAT_GTK'] ~= "" then + f = io.popen('ls -1 ' .. mask) + else + mask = gsub(mask,'/','\\') + local tmpfile = 'scite-ctx.tmp' + local cmd = 'dir /b "' .. mask .. '" > ' .. 
tmpfile + os.execute(cmd) + f = io.open(tmpfile) + end + local files = {} + if not f then -- path check added + return files + end + for line in f:lines() do + files[#files+1] = line + end + f:close() + return files +end + +-- banner + +do + + print("loading scite-ctx.lua definition file\n") + print("- see scite-ctx.properties for configuring info\n") + print("- ctx.spellcheck.wordpath set to " .. props['ctx.spellcheck.wordpath']) + if find(lower(props['ctx.spellcheck.wordpath']),"ctxspellpath") then + if os.getenv('ctxspellpath') then + print("- ctxspellpath set to " .. os.getenv('CTXSPELLPATH')) + else + print("- 'ctxspellpath is not set") + end + print("- ctx.spellcheck.wordpath expands to " .. string.expand(props['ctx.spellcheck.wordpath'])) + end + print("\n- ctx.wraptext.length is set to " .. props['ctx.wraptext.length']) + if props['ctx.helpinfo'] ~= '' then + print("\n- key bindings:\n") + print((gsub(string.strip(props['ctx.helpinfo']),"%s*\|%s*","\n"))) + end + print("\n- recognized first lines:\n") + print("xml ", 2) + +function wrap_text() + + -- We always go to the end of a line, so in fact some of + -- the variables set next are not needed. + + local length = props["ctx.wraptext.length"] + + if length == '' then length = 80 else length = tonumber(length) end + + local startposition = editor.SelectionStart + local endposition = editor.SelectionEnd + + if startposition == endposition then return end + + editor:LineEndExtend() + + startposition = editor.SelectionStart + endposition = editor.SelectionEnd + + -- local startline = line_of_position(startposition) + -- local endline = line_of_position(endposition) + -- local startcolumn = column_of_position(startposition) + -- local endcolumn = column_of_position(endposition) + -- + -- editor:SetSel(startposition,endposition) + + local startline = props['SelectionStartLine'] + local endline = props['SelectionEndLine'] + local startcolumn = props['SelectionStartColumn'] - 1 + local endcolumn = props['SelectionEndColumn'] - 1 + + local replacement = { } + local templine = '' + local indentation = rep(' ',startcolumn) + local selection = editor:GetSelText() + + selection = gsub(selection,"[\n\r][\n\r]","\n") + selection = gsub(selection,"\n\n+",' ' .. magicstring .. ' ') + selection = gsub(selection,"^%s",'') + + for snippet in gmatch(selection,"%S+") do + if snippet == magicstring then + replacement[#replacement+1] = templine + replacement[#replacement+1] = "" + templine = '' + elseif #templine + #snippet > length then + replacement[#replacement+1] = templine + templine = indentation .. snippet + elseif #templine == 0 then + templine = indentation .. snippet + else + templine = templine .. ' ' .. snippet + end + end + + replacement[#replacement+1] = templine + replacement[1] = gsub(replacement[1],"^%s+",'') + + if endcolumn == 0 then + replacement[#replacement+1] = "" + end + + editor:ReplaceSel(concat(replacement,"\n")) + +end + +function unwrap_text() + + local startposition = editor.SelectionStart + local endposition = editor.SelectionEnd + + if startposition == endposition then return end + + editor:HomeExtend() + editor:LineEndExtend() + + startposition = editor.SelectionStart + endposition = editor.SelectionEnd + + local magicstring = rep("", 2) + local selection = gsub(editor:GetSelText(),"[\n\r][\n\r]+", ' ' .. magicstring .. ' ') + local replacement = '' + + for snippet in gmatch(selection,"%S+") do + if snippet == magicstring then + replacement = replacement .. "\n" + else + replacement = replacement .. snippet .. 
"\n" + end + end + + if endcolumn == 0 then replacement = replacement .. "\n" end + + editor:ReplaceSel(replacement) + +end + +function sort_text() + + local startposition = editor.SelectionStart + local endposition = editor.SelectionEnd + + if startposition == endposition then return end + + -- local startcolumn = column_of_position(startposition) + -- local endcolumn = column_of_position(endposition) + -- + -- editor:SetSel(startposition,endposition) + + local startline = props['SelectionStartLine'] + local endline = props['SelectionEndLine'] + local startcolumn = props['SelectionStartColumn'] - 1 + local endcolumn = props['SelectionEndColumn'] - 1 + + startposition = extend_to_start() + endposition = extend_to_end() + + local selection = gsub(editor:GetSelText(), "%s*$", '') + + list = string.grab(selection,"[^\n\r]+") + table.alphasort(list, startcolumn) + local replacement = concat(list, "\n") + + editor:GotoPos(startposition) + editor:SetSel(startposition,endposition) + + if endcolumn == 0 then replacement = replacement .. "\n" end + + editor:ReplaceSel(replacement) + +end + +function uncomment_xml() + + local startposition = editor.SelectionStart + local endposition = editor.SelectionEnd + + if startposition == endposition then return end + + local startposition = editor.SelectionStart + local endposition = editor.SelectionEnd + + local selection = gsub(editor:GetSelText(), "%<%!%-%-.-%-%-%>", '') + + editor:GotoPos(startposition) + editor:SetSel(startposition,endposition) + + editor:ReplaceSel(selection) + editor:GotoPos(startposition) + +end + +function document_text() + + local startposition = editor.SelectionStart + local endposition = editor.SelectionEnd + + if startposition == endposition then return end + + startposition = extend_to_start() + endposition = extend_to_end() + + editor:SetSel(startposition,endposition) + + local filetype = getfiletype() + + local replacement = '' + for i = editor:LineFromPosition(startposition), editor:LineFromPosition(endposition) do + local str = editor:GetLine(i) + if filetype == 'xml' then + if find(str,"^<%!%-%- .* %-%->%s*$") then + replacement = replacement .. gsub(str,"^<%!%-%- (.*) %-%->(%s*)$","%1\n") + elseif find(str,"%S") then + replacement = replacement .. '\n" + else + replacement = replacement .. str + end + else + if find(str,"^%%D%s+$") then + replacement = replacement .. "\n" + elseif find(str,"^%%D ") then + replacement = replacement .. gsub(str,"^%%D ",'') + else + replacement = replacement .. '%D ' .. str + end + end + end + + editor:ReplaceSel(gsub(replacement,"[\n\r]$",'')) + +end + +function quote_text() + + local filetype, leftquotation, rightquotation = getfiletype(), '', '' + + if filetype == 'xml' then + leftquotation, rightquotation = "", "" + leftquote, rightquote = "", "" + else + leftquotation, rightquotation = "\\quotation {", "}" + leftquote, rightquote = "\\quote {", "}" + end + + local replacement = editor:GetSelText() + replacement = gsub(replacement,"\`\`(.-)\'\'", leftquotation .. "%1" .. rightquotation) + replacement = gsub(replacement,"\"(.-)\"", leftquotation .. "%1" .. rightquotation) + replacement = gsub(replacement,"\`(.-)\'", leftquote .. "%1" .. rightquote ) + replacement = gsub(replacement,"\'(.-)\'", leftquote .. "%1" .. 
rightquote ) + editor:ReplaceSel(replacement) + +end + +function compound_text() + + local filetype = getfiletype() + + if filetype == 'xml' then + editor:ReplaceSel(gsub(editor:GetSelText(),"(>[^<%-][^<%-]+)([-\/])(%w%w+)","%1%3")) + else + editor:ReplaceSel(gsub(editor:GetSelText(),"([^\|])([-\/]+)([^\|])","%1|%2|%3")) + end + +end + +-- written while listening to Alanis Morissette's acoustic +-- Jagged Little Pill and Tori Amos' Beekeeper after +-- reinstalling on my good old ATH-7 + +local language = props["ctx.spellcheck.language"] +local wordsize = props["ctx.spellcheck.wordsize"] +local wordpath = props["ctx.spellcheck.wordpath"] + +if language == '' then language = 'uk' end +if wordsize == '' then wordsize = 4 else wordsize = tonumber(wordsize) end + +local wordfile = "" +local wordlist = {} +local worddone = 0 + +-- we use wordlist as a hash so that we can add entries without the +-- need to sort and also use a fast (built in) search + +-- function kpsewhich_file(filename,filetype,progname) +-- local progflag, typeflag = '', '' +-- local tempname = os.tmpname() +-- if progname then +-- progflag = " --progname=" .. progname .. " " +-- end +-- if filetype then +-- typeflag = " --format=" .. filetype .. " " +-- end +-- local command = "kpsewhich" .. progflag .. typeflag .. " " .. filename .. " > " .. tempname +-- os.execute(command) +-- for line in io.lines(tempname) do +-- return gsub(line, "\s*$", '') +-- end +-- end + +function check_text() + + local dlanguage = props["ctx.spellcheck.language"] + local dwordsize = props["ctx.spellcheck.wordsize"] + local dwordpath = props["ctx.spellcheck.wordpath"] + + if dlanguage ~= '' then dlanguage = tostring(language) end + if dwordsize ~= '' then dwordsize = tonumber(wordsize) end + + local firstline, skipfirst = editor:GetLine(0), false + local filetype, wordskip, wordgood = getfiletype(), '', '' + + if filetype == 'tex' then + wordskip = "\\" + elseif filetype == 'xml' then + wordskip = "<" + wordgood = ">" + end + + if props["ctx.spellcheck.language"] == 'auto' then + if filetype == 'tex' then + -- % version =1.0 language=uk + firstline = gsub(firstline,"^%%%s*",'') + firstline = gsub(firstline,"%s*$",'') + for key, val in gmatch(firstline,"(%w+)=(%w+)") do + if key == "language" then + language = val + traceln("auto document language " .. "'" .. language .. "' (tex)") + end + end + skipfirst = true + elseif filetype == 'xml' then + -- + firstline = gsub(firstline,"^%<%?xml%s*", '') + firstline = gsub(firstline,"%s*%?%>%s*$", '') + for key, val in gmatch(firstline,"(%w+)=[\"\'](.-)[\"\']") do + if key == "language" then + language = val + traceln("auto document language " .. "'" .. language .. "' (xml)") + end + end + skipfirst = true + end + end + + local fname = props["ctx.spellcheck.wordfile." .. language] + local fsize = props["ctx.spellcheck.wordsize." .. language] + + if fsize ~= '' then wordsize = tonumber(fsize) end + + if fname ~= '' and fname ~= wordfile then + wordfile, worddone, wordlist = fname, 0, {} + for filename in gmatch(wordfile,"[^%,]+") do + if wordpath ~= '' then + filename = string.expand(wordpath) .. '/' .. filename + end + if io.exists(filename) then + traceln("loading " .. filename) + for line in io.lines(filename) do + if not find(line,"^[\%\#\-]") then + str = gsub(line,"%s*$", '') + rawset(wordlist,str,true) + worddone = worddone + 1 + end + end + else + traceln("unknown file '" .. filename .."'") + end + end + traceln(worddone .. 
" words loaded") + end + + reset_text() + + if worddone == 0 then + traceln("no (valid) language or wordfile specified") + else + traceln("start checking") + if wordskip ~= '' then + traceln("ignoring " .. wordskip .. "..." .. wordgood) + end + local i, j, lastpos, startpos, endpos, snippet, len, first = 0, 0, -1, 0, 0, '', 0, 0 + local ok, skip, ch = false, false, '' + if skipfirst then first = #firstline end + for k = first, editor.TextLength do + ch = editor:textrange(k,k+1) + if wordgood ~= '' and ch == wordgood then + skip = false + elseif ch == wordskip then + skip = true + end + if find(ch,"%w") and not find(ch,"%d") then + if not skip then + if ok then + endpos = k + else + startpos = k + endpos = k + ok = true + end + end + elseif ok and not skip then + len = endpos - startpos + 1 + if len >= wordsize then + snippet = editor:textrange(startpos,endpos+1) + i = i + 1 + if wordlist[snippet] or wordlist[lower(snippet)] then + j = j + 1 + else + editor:StartStyling(startpos,INDICS_MASK) + editor:SetStyling(len,INDIC2_MASK) -- INDIC0_MASK+2 + end + end + ok = false + elseif wordgood == '' then + skip = (ch == wordskip) + end + end + traceln(i .. " words checked, " .. (i-j) .. " errors") + end + +end + +function reset_text() + editor:StartStyling(0,INDICS_MASK) + editor:SetStyling(editor.TextLength,INDIC_PLAIN) +end + +-- menu + +local menuactions = {} +local menufunctions = {} + +function UserListShow(menutrigger, menulist) + local menuentries = {} + local list = string.grab(menulist,"[^%|]+") + menuactions = {} + for i=1, #list do + if list[i] ~= '' then + for key, val in gmatch(list[i],"%s*(.+)=(.+)%s*") do + menuentries[#menuentries+1] = key + menuactions[key] = val + end + end + end + local menustring = concat(menuentries,'|') + if menustring == "" then + traceln("There are no templates defined for this file type.") + else + editor.AutoCSeparator = byte('|') + editor:UserListShow(menutrigger,menustring) + editor.AutoCSeparator = byte(' ') + end +end + +function OnUserListSelection(trigger,choice) + if menufunctions[trigger] and menuactions[choice] then + return menufunctions[trigger](menuactions[choice]) + else + return false + end +end + +-- main menu + +local menutrigger = 12 + +function show_menu(menulist) + UserListShow(menutrigger, menulist) +end + +function process_menu(action) + if not find(action,"%(%)$") then + assert(loadstring(action .. "()"))() + else + assert(loadstring(action))() + end +end + +menufunctions[12] = process_menu + +-- templates + +local templatetrigger = 13 + +local ctx_template_paths = { "./ctx-templates", "../ctx-templates", "../../ctx-templates" } +local ctx_auto_templates = false +local ctx_template_list = "" + +local ctx_path_list = {} +local ctx_path_done = {} +local ctx_path_name = {} + +function ctx_list_loaded(path) + return ctx_path_list[path] and #ctx_path_list[path] > 0 +end + +function insert_template(templatelist) + if props["ctx.template.scan"] == "yes" then + local path = props["FileDir"] + local rescan = props["ctx.template.rescan"] == "yes" + local suffix = props["ctx.template.suffix." .. props["FileExt"]] -- alas, no suffix expansion here + local current = path .. "+" .. props["FileExt"] + if rescan then + print("re-scanning enabled") + end + ctx_template_list = "" + if not ctx_path_done[path] or rescan then + local pattern = "*.*" + for i, pathname in ipairs(ctx_template_paths) do + print("scanning " .. gsub(path,"\\","/") .. "/" .. pathname) + ctx_path_name[path] = pathname + ctx_path_list[path] = get_dir_list(pathname .. "/" .. 
pattern) + if ctx_list_loaded(path) then + print("finished locating template files") + break + end + end + if ctx_list_loaded(path) then + print(#ctx_path_list[path] .. " template files found") + else + print("no template files found") + end + end + if ctx_list_loaded(path) then + ctx_template_list = "" + local pattern = "%." .. suffix .. "$" + local n = 0 + for j, filename in ipairs(ctx_path_list[path]) do + if find(filename,pattern) then + n = n + 1 + local menuname = gsub(filename,"%..-$","") + if ctx_template_list ~= "" then + ctx_template_list = ctx_template_list .. "|" + end + ctx_template_list = ctx_template_list .. menuname .. "=" .. ctx_path_name[path] .. "/" .. filename + end + end + if not ctx_path_done[path] then + print(n .. " suitable template files found") + end + end + ctx_path_done[path] = true + if ctx_template_list == "" then + ctx_auto_templates = false + else + ctx_auto_templates = true + templatelist = ctx_template_list + end + else + ctx_auto_templates = false + end + if templatelist ~= "" then + UserListShow(templatetrigger, templatelist) + end +end + + +-- ctx.template.[whatever].[filetype] +-- ctx.template.[whatever].data.[filetype] +-- ctx.template.[whatever].file.[filetype] +-- ctx.template.[whatever].list.[filetype] + +function process_template_one(action) + local text = nil + if ctx_auto_templates then + local f = io.open(action,"r") + if f then + text = gsub(f:read("*all"),"\n$","") + f:close() + else + print("unable to auto load template file " .. text) + text = nil + end + end + if not text or text == "" then + text = props["ctx.template." .. action .. ".file"] + if not text or text == "" then + text = props["ctx.template." .. action .. ".data"] + if not text or text == "" then + text = props["ctx.template." .. action] + end + else + local f = io.open(text,"r") + if f then + text = gsub(f:read("*all"),"\n$","") + f:close() + else + print("unable to load template file " .. text) + text = nil + end + end + end + if text then + text = gsub(text,"\\n","\n") + local pos = find(text,"%?") + text = gsub(text,"%?","") + editor:insert(editor.CurrentPos,text) + if pos then + editor.CurrentPos = editor.CurrentPos + pos - 1 + editor.SelectionStart = editor.CurrentPos + editor.SelectionEnd = editor.CurrentPos + editor:GotoPos(editor.CurrentPos) + end + end +end + +menufunctions[13] = process_template_one +menufunctions[14] = process_template_two + +-- command.name.26.*=Open Logfile +-- command.subsystem.26.*=3 +-- command.26.*=open_log +-- command.save.before.26.*=2 +-- command.groupundo.26.*=yes +-- command.shortcut.26.*=Ctrl+E + +function open_log() + scite.Open(props['FileName'] .. 
".log") +end diff --git a/Master/texmf-dist/context/data/scite/scite-ctx.properties b/Master/texmf-dist/context/data/scite/scite-ctx.properties index 94a51aeb739..f23508acab4 100644 --- a/Master/texmf-dist/context/data/scite/scite-ctx.properties +++ b/Master/texmf-dist/context/data/scite/scite-ctx.properties @@ -12,7 +12,8 @@ # ext.lua.auto.reload=1 -ext.lua.startup.script=$(SciteDefaultHome)/scite-ctx.lua +#~ ext.lua.startup.script=$(SciteDefaultHome)/scite-ctx.lua +ext.lua.startup.script=$(SciteUserHome)/scite-ctx.lua #~ extension.$(file.patterns.context)=scite-ctx.lua #~ extension.$(file.patterns.example)=scite-ctx.lua diff --git a/Master/texmf-dist/context/data/scite/scite-ctx.readme b/Master/texmf-dist/context/data/scite/scite-ctx.readme index f796e543eb9..c5f5977a669 100644 --- a/Master/texmf-dist/context/data/scite/scite-ctx.readme +++ b/Master/texmf-dist/context/data/scite/scite-ctx.readme @@ -1,8 +1,19 @@ -Short note: - -- use scite 1.64 of later -- copy scite-ctx.lua to ..\scite\wscite -- copy scite-ctx.properties to wherever context.properties lives -- add "import" to context.properties -- create path for spell files and put spell files there -- let environment variable CTXSPELLPATH to spell path +installation: + +-- download scite and install it (take the normal scite, not one that ships with ruby) +-- copy the texmf-context\context\data\scite\*.properties to c:\users\ +-- copy the texmf-context\context\data\scite\*.lua to the place where scite has its own support files (wscite) +-- add the line "import context" to your user properties file (entry under menu Options) + +running context: + +-- add texmf-mswin\bin to your path + +or + +-- start scite from a cmd that also calls setuptex + +spellchecking: + +-- create a path for spell files and put spell files there (these are files with just words) +-- set the environment variable CTXSPELLPATH to this path diff --git a/Master/texmf-dist/context/data/textadept/context.lua b/Master/texmf-dist/context/data/textadept/context.lua new file mode 100644 index 00000000000..f386eb87d13 --- /dev/null +++ b/Master/texmf-dist/context/data/textadept/context.lua @@ -0,0 +1,710 @@ +--[[ + Preliminary ConTeXT lexer + + % ConTeXt + tex context + mkii context + mkiv context + + experiment dd 2009/10/28 .. todo: + + -- figure out if tabs instead of splits are possible + -- locate an option to enter name in file dialogue (like windows permits) + -- nesting of lua somehow does not re-lex while typing (backtrack till begin_...) 
+ -- find trick to overload latex lexer (mime_types, metatype trickery) + -- convert scite lua scripts to textadept + -- check linux and osx versions + -- figure out why loading a file fails (lfs too) + + so, this is just an experiment + +]]-- + +local textadept = _G.textadept + +module(..., package.seeall) + +local context = context +local lua = require 'lua' + +local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V + +--~ local some_words = { "starttext", "stoptext", "writestatus" } + +local some_words = { + "CAP", + "Cap", + "Caps", + "Character", + "Characters", + "MONTH", + "Romannumerals", + "WEEKDAY", + "WORD", + "WORDS", + "Word", + "Words", + "about", + "adaptlayout", + "adding", + "appendix", + "arg", + "at", + "atpage", + "background", + "blackrule", + "blackrules", + "blank", + "bookmark", + "but", + "button", + "bypassblocks", + "cap", + "chapter", + "character", + "characters", + "chem", + "clip", + "clonefield", + "color", + "column", + "comment", + "comparecolorgroup", + "comparepalet", + "completecombinedlist", + "completelistoffloats", + "completelistofsorts", + "completelistofsynonyms", + "completeregister", + "convertnumber", + "copyfield", + "correctwhitespace", + "coupledocument", + "coupledregister", + "couplemarking", + "couplepage", + "couplepaper", + "coupleregister", + "crlf", + "currentdate", + "currentheadnumber", + "date", + "decouplemarking", + "define", + "defineblank", + "defineblock", + "definebodyfont", + "definebodyfontenvironment", + "definebuffer", + "definecolor", + "definecolorgroup", + "definecombinedlist", + "defineconversion", + "definedescription", + "defineenumeration", + "definefield", + "definefieldstack", + "definefiguresymbol", + "definefloat", + "definefont", + "defineframed", + "defineframedtext", + "definehead", + "defineindenting", + "defineinteractionmenu", + "defineinteractionmenu", + "definelabel", + "definelist", + "definelogo", + "definemakeup", + "definemarking", + "defineoutput", + "defineoverlay", + "definepalet", + "definepapersize", + "defineparagraphs", + "defineprofile", + "defineprogram", + "definerawfont", + "definereference", + "definereferenceformat", + "definereferencelist", + "defineregister", + "definerule", + "definesection", + "definesectionblock", + "definesorting", + "definestartstop", + "definesubfield", + "definesymbol", + "definesynonyms", + "definetabletemplate", + "definetabulate", + "definetext", + "definetextposition", + "definetextvariable", + "definetype", + "definetyping", + "defineversion", + "description", + "determineheadnumber", + "determinelistcharacteristics", + "disableinteractionmenu", + "donttest", + "emptylines", + "enumeration", + "externalfigure", + "field", + "fieldstack", + "fillinfield", + "fillinline", + "fillinrules", + "fillintext", + "fitfield", + "fixedspaces", + "followprofile", + "followprofileversion", + "followversion", + "footnote", + "footnotetext", + "forceblocks", + "fraction", + "framed", + "framedtext", + "from", + "getbuffer", + "getmarking", + "godown", + "goto", + "gotobox", + "graycolor", + "grid", + "hairline", + "head", + "headnumber", + "headtext", + "hideblocks", + "high", + "hl", + "in", + "indentation", + "indenting", + "inframed", + "ininner", + "inleft", + "inline", + "inmargin", + "inothermargin", + "inouter", + "inright", + "installlanguage", + "interactionbar", + "interactionbuttons", + "item", + "items", + "its", + "keepblocks", + "labeling", + "labels", + "labeltext", + "language", + "leftaligned", + "listsymbol", + "loadsorts", + "loadsynonyms", + "logfields", + 
"lohi", + "low", + "mainlanguage", + "mar", + "marginrule", + "margintext", + "marking", + "markversion", + "mathematics", + "mediaeval", + "midaligned", + "mirror", + "month", + "moveongrid", + "name", + "nextsection", + "nocap", + "noheaderandfooterlines", + "noindenting", + "nolist", + "nomarking", + "nomoreblocks", + "nomorefiles", + "nop", + "nospace", + "note", + "notopandbottomlines", + "nowhitespace", + "numbers", + "overbar", + "overbars", + "overstrike", + "overstrikes", + "packed", + "page", + "pagereference", + "pagetype", + "paragraph", + "part", + "periods", + "placebookmarks", + "placecombinedlist", + "placecombinedlist", + "placefloat", + "placefootnotes", + "placeformula", + "placelegend", + "placelist", + "placelistoffloats", + "placelistofsorts", + "placelistofsynonyms", + "placelocalfootnotes", + "placelogos", + "placeongrid", + "placeontopofeachother", + "placereferencelist", + "placeregister", + "placeregister", + "placerule", + "placesidebyside", + "placesubformula", + "placetextvariable", + "position", + "positiontext", + "processblocks", + "processpage", + "program", + "publication", + "quotation", + "quote", + "ran", + "ref", + "reference", + "referral", + "referraldate", + "register", + "reservefloat", + "reset", + "resetmarking", + "resettextcontent", + "rightaligned", + "romannumerals", + "rotate", + "scale", + "screen", + "section", + "seeregister", + "selectblocks", + "selectpaper", + "selectversion", + "settextcontent", + "settextvariable", + "setupalign", + "setuparranging", + "setupbackground", + "setupbackgrounds", + "setupblackrules", + "setupblank", + "setupblock", + "setupbodyfont", + "setupbodyfontenvironment", + "setupbottom", + "setupbottomtexts", + "setupbuffer", + "setupbuttons", + "setupcapitals", + "setupcaption", + "setupcaptions", + "setupclipping", + "setupcolor", + "setupcolors", + "setupcolumns", + "setupcombinations", + "setupcombinedlist", + "setupcomment", + "setupdescriptions", + "setupenumerations", + "setupexternalfigures", + "setupfield", + "setupfields", + "setupfillinlines", + "setupfillinrules", + "setupfloat", + "setupfloats", + "setupfloatsplitting", + "setupfooter", + "setupfootertexts", + "setupfootnotedefinition", + "setupfootnotes", + "setupforms", + "setupformulae", + "setupframed", + "setupframedtexts", + "setuphead", + "setupheader", + "setupheadertexts", + "setupheadnumber", + "setupheads", + "setupheadtext", + "setuphyphenmark", + "setupindentations", + "setupindenting", + "setupinmargin", + "setupinteraction", + "setupinteractionbar", + "setupinteractionscreen", + "setupinterlinespace", + "setupinterlinespace", + "setupitemgroup", + "setupitems", + "setuplabeltext", + "setuplanguage", + "setuplayout", + "setuplegend", + "setuplinenumbering", + "setuplines", + "setuplinewidth", + "setuplist", + "setuplistalternative", + "setupmakeup", + "setupmarginblocks", + "setupmarginrules", + "setupmarking", + "setupnarrower", + "setupnumbering", + "setupoppositeplacing", + "setupoutput", + "setuppagenumber", + "setuppagenumbering", + "setuppagetransitions", + "setuppalet", + "setuppaper", + "setuppapersize", + "setupparagraphnumbering", + "setupparagraphs", + "setuppositioning", + "setupprofiles", + "setupprograms", + "setuppublications", + "setupquote", + "setupreferencelist", + "setupreferencing", + "setupregister", + "setuprotate", + "setuprule", + "setupscreens", + "setupsection", + "setupsectionblock", + "setupsorting", + "setupspacing", + "setupstrut", + "setupsubpagenumber", + "setupsymbolset", + "setupsynchronization", + 
"setupsynchronizationbar", + "setupsynonyms", + "setupsystem", + "setuptab", + "setuptables", + "setuptabulate", + "setuptext", + "setuptextposition", + "setuptextrules", + "setuptexttexts", + "setuptextvariable", + "setupthinrules", + "setuptolerance", + "setuptop", + "setuptoptexts", + "setuptype", + "setuptyping", + "setupunderbar", + "setupurl", + "setupversions", + "setupwhitespace", + "showbodyfont", + "showbodyfontenvironment", + "showcolor", + "showcolorgroup", + "showexternalfigures", + "showfields", + "showframe", + "showgrid", + "showlayout", + "showmakeup", + "showpalet", + "showprint", + "showsetups", + "showstruts", + "showsymbolset", + "someline", + "somewhere", + "sort", + "space", + "splitfloat", + "startalignment", + "startbackground", + "startbuffer", + "startcolor", + "startcolumns", + "startcombination", + "startcomment", + "startcomponent", + "startdescription", + "startdocument", + "startenumeration", + "startenvironment", + "startfact", + "startfigure", + "startfloattext", + "startformula", + "startframedtext", + "starthiding", + "startinteractionmenu", + "startitemgroup", + "startlegend", + "startline", + "startlinecorrection", + "startlinenumbering", + "startlines", + "startlocal", + "startlocalenvironment", + "startlocalfootnotes", + "startmakeup", + "startmarginblock", + "startmarginrule", + "startnamemakeup", + "startnarrower", + "startopposite", + "startoverlay", + "startoverview", + "startpacked", + "startparagraph", + "startpositioning", + "startpostponing", + "startproduct", + "startprofile", + "startproject", + "startquotation", + "startregister", + "startsymbolset", + "startsynchronization", + "starttable", + "starttables", + "starttabulate", + "starttextrule", + "starttyping", + "startunpacked", + "startversion", + "stopalignment", + "stopbackground", + "stopbuffer", + "stopcolor", + "stopcolumns", + "stopcombination", + "stopcomment", + "stopcomponent", + "stopdescription", + "stopdocument", + "stopenumeration", + "stopenvironment", + "stopfact", + "stopfigure", + "stopfloattext", + "stopformula", + "stopframedtext", + "stophiding", + "stopinteractionmenu", + "stopitemgroup", + "stoplegend", + "stopline", + "stoplinecorrection", + "stoplinenumbering", + "stoplines", + "stoplocal", + "stoplocalenvironment", + "stoplocalfootnotes", + "stopmakeup", + "stopmarginblock", + "stopmarginrule", + "stopnamemakeup", + "stopnarrower", + "stopopposite", + "stopoverlay", + "stopoverview", + "stoppacked", + "stopparagraph", + "stoppositioning", + "stoppostponing", + "stopproduct", + "stopprofile", + "stopproject", + "stopquotation", + "stopsymbolset", + "stopsynchronization", + "stoptable", + "stoptables", + "stoptabulate", + "stoptextrule", + "stoptyping", + "stopunpacked", + "stopversion", + "stretched", + "sub", + "subject", + "subsection", + "subsubject", + "subsubsection", + "subsubsubject", + "switchtobodyfont", + "switchtorawfont", + "sym", + "symbol", + "synchronizationbar", + "synchronize", + "synonym", + "tab", + "tex", + "textreference", + "textrule", + "textvariable", + "thinrule", + "thinrules", + "title", + "tooltip", + "translate", + "typ", + "type", + "typebuffer", + "typefile", + "underbar", + "underbars", + "useURL", + "useblocks", + "usecommands", + "usedirectory", + "useencoding", + "useexternaldocument", + "useexternalfigure", + "useexternalfile", + "useexternalfiles", + "useexternalsoundtrack", + "usemodule", + "usereferences", + "usespecials", + "usesymbols", + "version", + "vl", + "weekday", + "whitespace", + "wordright", + "writebetweenlist", 
+ "writetolist", + "writetoreferencelist", + "writetoregister", +} + +local tex_word_match = word_match(word_list(some_words)) + +--~ local function tex_preamble_match() +--~ return P(function(input, index) +--~ if index < 10 then +--~ local s, e, word = input:find('^(.+)[\n\r]',index) +--~ if word then +--~ local interface = word:match("interface=(..)") +--~ if interface then +--~ local name = "c:/data/develop/context/lua/textadept/cont-" .. interface .. "-scite.lua" +--~ --~ local f = io.open(name,"rb") -- fails +--~ if f then +--~ local data = f:read("*all") +--~ data = data and loadstring(data) +--~ data = data and data() +--~ if data and type(data) == "table" then +--~ some_words = word_list(data) +--~ context.LoadTokens() +--~ -- InitLexer(context) +--~ end +--~ f:close() +--~ end +--~ end +--~ end +--~ end +--~ return false +--~ end) +--~ end + +local spacing = token('whitespace', S(" \n\r\t\f")^1) +--~ local preamble = token('preamble', P('%') * tex_preamble_match()) +local comment = token('comment', P('%') * (1-S("\n\r"))^0) +local keyword = token('keyword', P('\\') * tex_word_match) +local command = token('command', P('\\') * ((R("az","AZ")+S("@!?"))^1 + P(1))) +local grouping = token('grouping', S("{$}")) +local specials = token('specials', S("#()[]<>=\"")) +local extras = token('extras', S("`~%^&_-+/\'|")) + +-- LexByLine = true + +local startluacode = token("grouping", P("\\startluacode")) +local stopluacode = token("grouping", P("\\stopluacode")) +--~ local startctxlua = token("grouping", P("\\ctxlua") * spacing^0 * P("{")) +--~ local stopctxlua = token("grouping", P("}")) + +--~ local startMPcode = token("grouping", P("\\startMPcode")) +--~ local stopMPcode = token("grouping", P("\\stopMPcode")) +--~ local startuseMPgraphic = token("grouping", P("\\startuseMPgraphic")) +--~ local stopuseMPgraphic = token("grouping", P("\\stopuseMPgraphic")) +--~ local startreusableMPgraphic = token("grouping", P("\\startreusableMPgraphic")) +--~ local stopreusableMPgraphic = token("grouping", P("\\stopreusableMPgraphic")) +--~ local startuniqueMPgraphic = token("grouping", P("\\startuniqueMPgraphic")) +--~ local stopuniqueMPgraphic = token("grouping", P("\\stopuniqueMPgraphic")) + +function LoadTokens() + + lua.LoadTokens() + + add_token(context, 'whitespace', spacing) +--~ add_token(context, 'preamble', preamble) + add_token(context, 'comment', comment) + add_token(context, 'keyword', keyword) + add_token(context, 'command', command) + add_token(context, 'grouping', grouping) + add_token(context, 'specials', specials) + add_token(context, 'extras', extras) + add_token(context, 'any_char', any_char) + + lua.TokenPatterns.any_char = token('default', 1 - stopluacode) + + make_embeddable(lua, context, startluacode, stopluacode) +-- make_embeddable(lua, context, startctxlua, stopctxlua) -- no multiple embeddables unless more complex anychar + embed_language(context, lua) + +-- metapost.LoadTokens() +-- metapost.TokenPatterns.any_char = token('any_char', 1 - stopMPcode - stopuseMPgraphic - stopreusableMPgraphic - stopuniqueMPgraphic) +-- make_embeddable(metapost, context, startMPcode, stopMPcode ) +-- make_embeddable(metapost, context, startuseMPgraphic, stopuseMPgraphic ) +-- make_embeddable(metapost, context, startreusableMPgraphic, stopreusableMPgraphic) +-- make_embeddable(metapost, context, startuniqueMPgraphic, stopuniqueMPgraphic ) +end + +local bold = true +local italic = true + +function LoadStyles() + add_style('preamble', style_nothing .. 
{ fore = colors.yellow, bold = bold }) + add_style('comment', style_nothing .. { fore = colors.yellow, bold = bold }) + add_style('keyword', style_nothing .. { fore = colors.green, bold = bold, italic = italic }) + add_style('command', style_nothing .. { fore = colors.green, bold = bold }) + add_style('grouping', style_nothing .. { fore = colors.red, bold = bold }) + add_style('specials', style_nothing .. { fore = colors.blue, bold = bold }) + add_style('extras', style_nothing .. { fore = colors.yellow, bold = bold }) +end + +--~ textadept.mime_types.extensions["tex"] = context +--~ textadept.mime_types.extensions["mkii"] = context +--~ textadept.mime_types.extensions["mkiv"] = context diff --git a/Master/texmf-dist/context/data/texworks/TUG/TeXworks.ini b/Master/texmf-dist/context/data/texworks/TUG/TeXworks.ini new file mode 100644 index 00000000000..8c0a4a43df3 --- /dev/null +++ b/Master/texmf-dist/context/data/texworks/TUG/TeXworks.ini @@ -0,0 +1,18 @@ +[General] +launchOption=1 +locale=en +syntaxColoring=ConTeXt TeX +autoIndent=None +wrapLines=false +tabWidth=32 +font="Lucida Console,10,-1,5,50,0,0,0,0,0" +language=None +scaleOption=1 +previewScale=200 +magnifierSize=2 +circularMagnifier=true +defaultEngine=ConTeXt MkIV +binaryPaths= +autoHideConsole=false +openDialogDir= +recentFileList= diff --git a/Master/texmf-dist/context/data/texworks/TUG/texworks.ini b/Master/texmf-dist/context/data/texworks/TUG/texworks.ini deleted file mode 100644 index 8ef43433ed1..00000000000 --- a/Master/texmf-dist/context/data/texworks/TUG/texworks.ini +++ /dev/null @@ -1,45 +0,0 @@ -[General] -launchOption=1 -locale=en -syntaxColoring=ConTeXt TeX -autoIndent=None -wrapLines=false -tabWidth=32 -font="Lucida Console,10,-1,5,50,0,0,0,0,0" -language=None -scaleOption=1 -previewScale=200 -magnifierSize=2 -circularMagnifier=true -defaultEngine=ConTeXt MkIV -binaryPaths= -autoHideConsole=false -openDialogDir= -recentFileList= - -[engines] -size=6 -1\arguments=--autogenerate, --script, context, $fullname -1\name=ConTeXt MkIV -1\program=mtxrun.cmd -1\showPdf=true -2\arguments=--autogenerate, --script, check, $fullname -2\name=ConTeXt MkIV Check -2\program=mtxrun.cmd -2\showPdf=false -3\arguments=--autogenerate, --script, context, --purgeall -3\name=ConTeXt MkIV Purge -3\program=mtxrun.cmd -3\showPdf=false -4\arguments=/c, start, mtxrun.cmd, --autogenerate, --script, server, --auto -4\name=ConTeXt MkIV Server -4\program=cmd.exe -4\showPdf=false -5\name=ConTeXt MkIV Update -5\program=mtxrun.cmd -5\arguments=--autogenerate, --script, update, --force, --make -5\showPdf=false -6\name=ConTeXt MkIV Make -6\program=mtxrun.cmd -6\arguments=--autogenerate, --script, context, --generate, --make -6\showPdf=false diff --git a/Master/texmf-dist/context/data/texworks/configuration/tools.ini b/Master/texmf-dist/context/data/texworks/configuration/tools.ini new file mode 100644 index 00000000000..73071028b41 --- /dev/null +++ b/Master/texmf-dist/context/data/texworks/configuration/tools.ini @@ -0,0 +1,35 @@ +[001] +name=ConTeXt MkIV +program=mtxrun +arguments=--autogenerate, --script, context, $fullname +showPdf=true + +[002] +name=ConTeXt MkIV Check +program=mtxrun +arguments=--autogenerate, --script, check, $fullname +showPdf=false + +[003] +name=ConTeXt MkIV Purge +program=mtxrun +arguments=--autogenerate, --script, context, --purgeall +showPdf=false + +[004] +name=ConTeXt MkIV Server +program=cmd.exe +arguments=/c, start, mtxrun, --autogenerate, --script, server, --auto +showPdf=false + +[005] +name=ConTeXt MkIV Update 
+program=mtxrun +arguments=--autogenerate, --script, update, --force, --make +showPdf=false + +[006] +name=ConTeXt MkIV Make +program=mtxrun +arguments=--autogenerate, --script, context, --generate, --make +showPdf=false diff --git a/Master/texmf-dist/fonts/enc/dvips/context/ec-2004.enc b/Master/texmf-dist/fonts/enc/dvips/context/ec-2004.enc new file mode 100644 index 00000000000..f092a34a2fe --- /dev/null +++ b/Master/texmf-dist/fonts/enc/dvips/context/ec-2004.enc @@ -0,0 +1,82 @@ +% +% This is the EC encoding. +% +% These are the ligs and kerns in addition to those found in the +% AFM file. (The only ligs in the Times-Roman.afm file are the +% fi and fl ligatures.) +% +% LIGKERN space l =: lslash ; space L =: Lslash ; +% LIGKERN question quoteleft =: questiondown ; exclam quoteleft =: exclamdown ; +% LIGKERN hyphen hyphen =: endash ; endash hyphen =: emdash ; +% LIGKERN quoteleft quoteleft =: quotedblleft ; +% LIGKERN quoteright quoteright =: quotedblright ; +% +% We blow away kerns to and from spaces (TeX doesn't have a +% space) and also remove any kerns from the numbers (although +% the only kern pair that mentions a number in Times-Roman.afm +% is one one.) +% +% LIGKERN space {} * ; * {} space ; zero {} * ; * {} zero ; +% LIGKERN one {} * ; * {} one ; two {} * ; * {} two ; +% LIGKERN three {} * ; * {} three ; four {} * ; * {} four ; +% LIGKERN five {} * ; * {} five ; six {} * ; * {} six ; +% LIGKERN seven {} * ; * {} seven ; eight {} * ; * {} eight ; +% LIGKERN nine {} * ; * {} nine ; +% LIGKERN comma comma =: quotedblbase ; less less =: guillemotleft ; +% LIGKERN greater greater =: guillemotright ; +% +/ECEncoding [ % now 256 chars follow +% 0x00 + /grave /acute /circumflex /tilde /dieresis /hungarumlaut /ring /caron + /breve /macron /dotaccent /cedilla + /ogonek /quotesinglbase /guilsinglleft /guilsinglright +% 0x10 + /quotedblleft /quotedblright /quotedblbase /guillemotleft + /guillemotright /endash /emdash /cwm + /zeroinferior /dotlessi /dotlessj /ff /fi /fl /ffi /ffl +% 0x20 + /visualspace /exclam /quotedbl /numbersign + /dollar /percent /ampersand /quoteright + /parenleft /parenright /asterisk /plus /comma /hyphen /period /slash +% 0x30 + /zero /one /two /three /four /five /six /seven + /eight /nine /colon /semicolon /less /equal /greater /question +% 0x40 + /at /A /B /C /D /E /F /G /H /I /J /K /L /M /N /O +% 0x50 + /P /Q /R /S /T /U /V /W + /X /Y /Z /bracketleft /backslash /bracketright /asciicircum /underscore +% 0x60 + /quoteleft /a /b /c /d /e /f /g /h /i /j /k /l /m /n /o +% 0x70 + /p /q /r /s /t /u /v /w + /x /y /z /braceleft /bar /braceright /asciitilde /hyphen +% 0x80 + /Abreve /Aogonek /Cacute /Ccaron /Dcaron /Ecaron /Eogonek /Gbreve + /Lacute /Lcaron /Lslash /Nacute /Ncaron /Ng /Ohungarumlaut /Racute +% 0x90 + /Rcaron /Sacute /Scaron /Scedilla + /Tcaron /Tcedilla /Uhungarumlaut /Uring + /Ydieresis /Zacute /Zcaron /Zdotaccent /IJ /Idotaccent /dbar /section +% 0xA0 + /abreve /aogonek /cacute /ccaron /dcaron /ecaron /eogonek /gbreve + /lacute /lcaron /lslash /nacute /ncaron /ng /ohungarumlaut /racute +% 0xB0 + /rcaron /sacute /scaron /scedilla + /tquoteright /tcedilla /uhungarumlaut /uring + /ldot /zacute /zcaron /zdotaccent /ij /exclamdown /questiondown /sterling +% 0xC0 + /Agrave /Aacute /Acircumflex /Atilde /Adieresis /Aring /AE /Ccedilla + /Egrave /Eacute /Ecircumflex /Edieresis + /Igrave /Iacute /Icircumflex /Idieresis +% 0xD0 + /Eth /Ntilde /Ograve /Oacute /Ocircumflex /Otilde /Odieresis /OE + /Oslash /Ugrave /Uacute /Ucircumflex /Udieresis /Yacute /Thorn 
/Germandbls +% 0xE0 + /agrave /aacute /acircumflex /atilde /adieresis /aring /ae /ccedilla + /egrave /eacute /ecircumflex /edieresis + /igrave /iacute /icircumflex /idieresis +% 0xF0 + /eth /ntilde /ograve /oacute /ocircumflex /otilde /odieresis /oe + /oslash /ugrave /uacute /ucircumflex /udieresis /yacute /thorn /germandbls +] def diff --git a/Master/texmf-dist/fonts/map/pdftex/context/mkiv-base.map b/Master/texmf-dist/fonts/map/pdftex/context/mkiv-base.map new file mode 100644 index 00000000000..482bf089466 --- /dev/null +++ b/Master/texmf-dist/fonts/map/pdftex/context/mkiv-base.map @@ -0,0 +1,252 @@ +% some left-overs + +% context + +contnav ContextNavigation 0 then - return self .. self.rep(chr or " ",m) + return self .. rep(chr or " ",m) else return self end @@ -174,7 +196,7 @@ end function string:lpadd(n,chr) local m = n-#self if m > 0 then - return self.rep(chr or " ",m) .. self + return rep(chr or " ",m) .. self else return self end @@ -222,6 +244,17 @@ function string:pattesc() return (gsub(self,".",patterns_escapes)) end +local simple_escapes = { + ["-"] = "%-", + ["."] = "%.", + ["?"] = ".", + ["*"] = ".*", +} + +function string:simpleesc() + return (gsub(self,".",simple_escapes)) +end + function string:tohash() local t = { } for s in gmatch(self,"([^, ]+)") do -- lpeg @@ -233,10 +266,10 @@ end local pattern = lpeg.Ct(lpeg.C(1)^0) function string:totable() - return pattern:match(self) + return lpegmatch(pattern,self) end ---~ for _, str in ipairs { +--~ local t = { --~ "1234567123456712345671234567", --~ "a\tb\tc", --~ "aa\tbb\tcc", @@ -244,7 +277,10 @@ end --~ "aaaa\tbbbb\tcccc", --~ "aaaaa\tbbbbb\tccccc", --~ "aaaaaa\tbbbbbb\tcccccc", ---~ } do print(string.tabtospace(str)) end +--~ } +--~ for k,v do +--~ print(string.tabtospace(t[k])) +--~ end function string.tabtospace(str,tab) -- we don't handle embedded newlines @@ -252,7 +288,7 @@ function string.tabtospace(str,tab) local s = find(str,"\t") if s then if not tab then tab = 7 end -- only when found - local d = tab-(s-1)%tab + local d = tab-(s-1) % tab if d > 0 then str = gsub(str,"\t",rep(" ",d),1) else @@ -271,6 +307,25 @@ function string:compactlong() -- strips newlines and leading spaces return self end +function string:striplong() -- strips newlines and leading spaces + self = gsub(self,"^%s*","") + self = gsub(self,"[\n\r]+ *","\n") + return self +end + +function string:topattern(lowercase,strict) + if lowercase then + self = lower(self) + end + self = gsub(self,".",simple_escapes) + if self == "" then + self = ".*" + elseif strict then + self = "^" .. self .. 
"$" + end + return self +end + end -- of closure @@ -278,58 +333,64 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-lpeg'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc - ---~ l-lpeg.lua : - ---~ lpeg.digit = lpeg.R('09')^1 ---~ lpeg.sign = lpeg.S('+-')^1 ---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1) ---~ lpeg.number = lpeg.float + lpeg.integer ---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1 ---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1 ---~ lpeg.uppercase = lpeg.P("AZ") ---~ lpeg.lowercase = lpeg.P("az") - ---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed ---~ lpeg.space = lpeg.S(' ')^1 ---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1 ---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1 ---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1 - -local hash = { } +local lpeg = require("lpeg") + +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V +local match = lpeg.match + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local utf8byte = R("\128\191") + +patterns.utf8byte = utf8byte +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8byte +patterns.utf8three = R("\224\239") * utf8byte * utf8byte +patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte + +patterns.digit = digit +patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.float = sign^0 * digit^0 * P('.') * digit^1 +patterns.number = patterns.float + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = S(" ") +patterns.eol = S("\n\r") +patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) +patterns.newline = crlf + cr + lf +patterns.nonspace = 1 - patterns.space +patterns.nonspacer = 1 - patterns.spacer +patterns.whitespace = patterns.eol + patterns.spacer +patterns.nonwhitespace = 1 - patterns.whitespace +patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * lpeg.V(1) } -end - -function lpeg.startswith(pattern) --slightly adapted - return P(pattern) + return P { P(pattern) + 1 * V(1) } -- why so complex? 
end function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end --- variant: - ---~ local parser = lpeg.Ct(lpeg.splitat(newline)) - -local crlf = P("\r\n") -local cr = P("\r") -local lf = P("\n") -local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -local newline = crlf + cr + lf -local spacing = space^0 * newline - +local spacing = patterns.spacer^0 * patterns.newline -- sort of strip local empty = spacing * Cc("") local nonempty = Cs((1-spacing)^1) * spacing^-1 local content = (empty + nonempty)^1 @@ -337,15 +398,15 @@ local content = (empty + nonempty)^1 local capture = Ct(content^0) function string:splitlines() - return capture:match(self) + return match(capture,self) end -lpeg.linebyline = content -- better make a sublibrary +patterns.textline = content ---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more ---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more ---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps ---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps local splitters_s, splitters_m = { }, { } @@ -355,7 +416,7 @@ local function splitat(separator,single) separator = P(separator) if single then local other, any = C((1 - separator)^0), P(1) - splitter = other * (separator * C(any^0) + "") + splitter = other * (separator * C(any^0) + "") -- ? splitters_s[separator] = splitter else local other = C((1 - separator)^0) @@ -370,15 +431,72 @@ lpeg.splitat = splitat local cache = { } +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = Ct(splitat(separator)) + cache[separator] = c + end + return match(c,str) +end + function string:split(separator) local c = cache[separator] if not c then c = Ct(splitat(separator)) cache[separator] = c end - return c:match(self) + return match(c,self) +end + +lpeg.splitters = cache + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,str) +end + +function string:checkedsplit(separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,self) end +--~ function lpeg.append(list,pp) +--~ local p = pp +--~ for l=1,#list do +--~ if p then +--~ p = p + P(list[l]) +--~ else +--~ p = P(list[l]) +--~ end +--~ end +--~ return p +--~ end + +--~ from roberto's site: + +local f1 = string.byte + +local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + end -- of closure @@ -386,7 +504,7 @@ do -- create closure to overcome 200 
locals limit if not modules then modules = { } end modules ['l-table'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -395,9 +513,10 @@ if not modules then modules = { } end modules ['l-table'] = { table.join = table.concat local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump +local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match local getmetatable, setmetatable = getmetatable, setmetatable -local type, next, tostring, ipairs = type, next, tostring, ipairs +local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs +local unpack = unpack or table.unpack function table.strip(tab) local lst = { } @@ -412,6 +531,14 @@ function table.strip(tab) return lst end +function table.keys(t) + local k = { } + for key, _ in next, t do + k[#k+1] = key + end + return k +end + local function compare(a,b) return (tostring(a) < tostring(b)) end @@ -455,7 +582,7 @@ end table.sortedkeys = sortedkeys table.sortedhashkeys = sortedhashkeys -function table.sortedpairs(t) +function table.sortedhash(t) local s = sortedhashkeys(t) -- maybe just sortedkeys local n = 0 local function kv(s) @@ -466,6 +593,8 @@ function table.sortedpairs(t) return kv, s end +table.sortedpairs = table.sortedhash + function table.append(t, list) for _,v in next, list do insert(t,v) @@ -588,18 +717,18 @@ end -- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) -function table.is_empty(t) +function table.is_empty(t) -- obolete, use inline code instead return not t or not next(t) end -function table.one_entry(t) +function table.one_entry(t) -- obolete, use inline code instead local n = next(t) return n and not next(t,n) end -function table.starts_at(t) - return ipairs(t,1)(t,0) -end +--~ function table.starts_at(t) -- obsolete, not nice +--~ return ipairs(t,1)(t,0) +--~ end function table.tohash(t,value) local h = { } @@ -677,6 +806,8 @@ end -- -- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) +-- problem: there no good number_to_string converter with the best resolution + local function do_serialize(root,name,depth,level,indexed) if level > 0 then depth = depth .. " " @@ -699,8 +830,9 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s{",depth)) end end + -- we could check for k (index) being number (cardinal) if root and next(root) then - local first, last = nil, 0 -- #root cannot be trusted here + local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) if compact then -- NOT: for k=1,#root do (we need to quit at nil) for k,v in ipairs(root) do -- can we use next? 
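
The hunk above renames table.sortedpairs to table.sortedhash (keeping the old name as an alias) so that hash tables can be walked in a predictable, sorted-key order instead of the undefined order of next(). Below is a minimal standalone Lua sketch of that pattern, assuming plain Lua 5.1 semantics; the local names sortedkeys and sortedhash are illustrative only and do not claim to match the exact ConTeXt implementation.

    -- collect the keys of a hash table and sort them by their string form,
    -- mirroring the compare() helper used in the patch above
    local function sortedkeys(t)
        local keys = { }
        for k in next, t do
            keys[#keys+1] = k
        end
        table.sort(keys, function(a,b) return tostring(a) < tostring(b) end)
        return keys
    end

    -- return an iterator that yields key/value pairs in sorted-key order
    local function sortedhash(t)
        local keys, n = sortedkeys(t), 0
        return function()
            n = n + 1
            local k = keys[n]
            if k ~= nil then
                return k, t[k]
            end
        end
    end

    -- usage: output order is stable regardless of insertion order
    local demo = { beta = 2, alpha = 1, gamma = 3 }
    for k, v in sortedhash(demo) do
        print(k, v)  -- alpha 1, beta 2, gamma 3
    end

Keeping sortedpairs as an alias (table.sortedpairs = table.sortedhash) preserves existing callers while the new name signals that the helper targets the hash part of a table rather than its array part.
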
@@ -721,10 +853,10 @@ local function do_serialize(root,name,depth,level,indexed) if hexify then handle(format("%s 0x%04X,",depth,v)) else - handle(format("%s %s,",depth,v)) + handle(format("%s %s,",depth,v)) -- %.99g end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) @@ -761,29 +893,29 @@ local function do_serialize(root,name,depth,level,indexed) --~ if hexify then --~ handle(format("%s %s=0x%04X,",depth,key(k),v)) --~ else - --~ handle(format("%s %s=%s,",depth,key(k),v)) + --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g --~ end if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) else - handle(format("%s [%s]=%s,",depth,k,v)) + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g end elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then if hexify then handle(format("%s %s=0x%04X,",depth,k,v)) else - handle(format("%s %s=%s,",depth,k,v)) + handle(format("%s %s=%s,",depth,k,v)) -- %.99g end else if hexify then handle(format("%s [%q]=0x%04X,",depth,k,v)) else - handle(format("%s [%q]=%s,",depth,k,v)) + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then --~ handle(format("%s %s=%s,",depth,key(k),v)) if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then @@ -992,7 +1124,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify) end end -local function flatten(t,f,complete) +local function flatten(t,f,complete) -- is this used? meybe a variant with next, ... for i=1,#t do local v = t[i] if type(v) == "table" then @@ -1021,6 +1153,24 @@ end table.flatten_one_level = table.unnest +-- a better one: + +local function flattened(t,f) + if not f then + f = { } + end + for k, v in next, t do + if type(v) == "table" then + flattened(v,f) + else + f[k] = v + end + end + return f +end + +table.flattened = flattened + -- the next three may disappear function table.remove_value(t,value) -- todo: n @@ -1156,7 +1306,7 @@ function table.clone(t,p) -- t is optional or nil or table elseif not t then t = { } end - setmetatable(t, { __index = function(_,key) return p[key] end }) + setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ? 
return t end @@ -1184,21 +1334,35 @@ function table.reverse(t) return tt end ---~ function table.keys(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return k ---~ end +function table.insert_before_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end ---~ function table.keys_as_string(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return concat(k,"") ---~ end +function table.insert_after_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end end -- of closure @@ -1207,13 +1371,13 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-io'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local byte = string.byte +local byte, find, gsub = string.byte, string.find, string.gsub if string.find(os.getenv("PATH"),";") then io.fileseparator, io.pathseparator = "\\", ";" @@ -1242,7 +1406,7 @@ function io.savedata(filename,data,joiner) elseif type(data) == "function" then data(f) else - f:write(data) + f:write(data or "") end f:close() return true @@ -1371,20 +1535,21 @@ function io.ask(question,default,options) end io.write(string.format(" ")) local answer = io.read() - answer = answer:gsub("^%s*(.*)%s*$","%1") + answer = gsub(answer,"^%s*(.*)%s*$","%1") if answer == "" and default then return default elseif not options then return answer else - for _,v in pairs(options) do - if v == answer then + for k=1,#options do + if options[k] == answer then return answer end end local pattern = "^" .. 
answer - for _,v in pairs(options) do - if v:find(pattern) then + for k=1,#options do + local v = options[k] + if find(v,pattern) then return v end end @@ -1399,20 +1564,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-number'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format = string.format +local tostring = tostring +local format, floor, insert, match = string.format, math.floor, table.insert, string.match +local lpegmatch = lpeg.match number = number or { } -- a,b,c,d,e,f = number.toset(100101) function number.toset(n) - return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") + return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") end function number.toevenhex(n) @@ -1438,10 +1605,21 @@ end local one = lpeg.C(1-lpeg.S(''))^1 function number.toset(n) - return one:match(tostring(n)) + return lpegmatch(one,tostring(n)) end - +function number.bits(n,zero) + local t, i = { }, (zero and 0) or 1 + while n > 0 do + local m = n % 2 + if m > 0 then + insert(t,1,i) + end + n = floor(n/2) + i = i + 1 + end + return t +end end -- of closure @@ -1450,7 +1628,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-set'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1540,46 +1718,63 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-os'] = { version = 1.001, - comment = "companion to luat-lub.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local find = string.find +-- maybe build io.flush in os.execute + +local find, format, gsub = string.find, string.format, string.gsub +local random, ceil = math.random, math.ceil + +local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end function os.resultof(command) - return io.popen(command,"r"):read("*all") + ioflush() -- else messed up logging + local handle = io.popen(command,"r") + if not handle then + -- print("unknown command '".. command .. "' in os.resultof") + return "" + else + return handle:read("*all") or "" + end end -if not os.exec then os.exec = os.execute end -if not os.spawn then os.spawn = os.execute end - ---~ os.type : windows | unix (new, we already guessed os.platform) ---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +--~ os.type : windows | unix (new, we already guessed os.platform) +--~ os.name : windows | msdos | linux | macosx | solaris | .. 
| generic (new) +--~ os.platform : extended os.name with architecture if not io.fileseparator then if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows" + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" else - io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix" + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" end end -os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix" +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix = 'dll', 'exe' +else + os.libsuffix, os.binsuffix = 'so', '' +end function os.launch(str) - if os.platform == "windows" then + if os.type == "windows" then os.execute("start " .. str) -- os.spawn ? else os.execute(str .. " &") -- os.spawn ? end end -if not os.setenv then - function os.setenv() return false end -end - if not os.times then -- utime = user time -- stime = system time @@ -1609,64 +1804,218 @@ end --~ print(os.date("%H:%M:%S",os.gettimeofday())) --~ print(os.date("%H:%M:%S",os.time())) -os.arch = os.arch or function() - local a = os.resultof("uname -m") or "linux" - os.arch = function() - return a +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } + +local resolvers = os.resolvers + +local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil +local osix = osmt.__index + +osmt.__index = function(t,k) + return (resolvers[k] or osix)(t,k) +end + +setmetatable(os,osmt) + +if not os.setenv then + + -- we still store them but they won't be seen in + -- child processes although we might pass them some day + -- using command concatination + + local env, getenv = { }, os.getenv + + function os.setenv(k,v) + env[k] = v + end + + function os.getenv(k) + return env[k] or getenv(k) end - return a + end -local platform +-- we can use HOSTTYPE on some platforms -function os.currentplatform(name,default) - if not platform then - local name = os.name or os.platform or name -- os.name is built in, os.platform is mine - if not name then - platform = default or "linux" - elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then - if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then - platform = "mswin-64" - else - platform = "mswin" - end +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" else - local architecture = os.arch() - if name == "linux" then - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - elseif name 
== "macosx" then - if find(architecture,"i386") then - platform = "osx-intel" - else - platform = "osx-ppc" - end - elseif name == "sunos" then - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - elseif name == "freebsd" then - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - else - platform = default or name - end + platform = "mswin" end - function os.currentplatform() - return platform + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-64" + else + platform = "kfreebsd-i386" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + 
os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. +-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(os.date("%H")-os.date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) + end + else + return 1 end - return platform end @@ -1676,7 +2025,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-file'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1687,14 +2036,17 @@ if not modules then modules = { } end modules ['l-file'] = { file = file or { } local concat = table.concat -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char +local lpegmatch = lpeg.match function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) end function file.addsuffix(filename, suffix) - if not find(filename,"%.[%a%d]+$") then + if not suffix or suffix == "" then + return filename + elseif not find(filename,"%.[%a%d]+$") then return filename .. "." .. suffix else return filename @@ -1717,20 +2069,39 @@ function file.nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$","")) end -function file.extname(name) - return match(name,"^.+%.([^/\\]-)$") or "" +function file.extname(name,default) + return match(name,"^.+%.([^/\\]-)$") or default or "" end file.suffix = file.extname ---~ print(file.join("x/","/y")) ---~ print(file.join("http://","/y")) ---~ print(file.join("http://a","/y")) ---~ print(file.join("http:///a","/y")) ---~ print(file.join("//nas-1","/y")) +--~ function file.join(...) +--~ local pth = concat({...},"/") +--~ pth = gsub(pth,"\\","/") +--~ local a, b = match(pth,"^(.*://)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ a, b = match(pth,"^(//)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ return (gsub(pth,"//+","/")) +--~ end + +local trick_1 = char(1) +local trick_2 = "^" .. trick_1 .. "/+" function file.join(...) - local pth = concat({...},"/") + local lst = { ... 
} + local a, b = lst[1], lst[2] + if a == "" then + lst[1] = trick_1 + elseif b and find(a,"^/+$") and find(b,"^/") then + lst[1] = "" + lst[2] = gsub(b,"^/+","") + end + local pth = concat(lst,"/") pth = gsub(pth,"\\","/") local a, b = match(pth,"^(.*://)(.*)$") if a and b then @@ -1740,17 +2111,28 @@ function file.join(...) if a and b then return a .. gsub(b,"//+","/") end + pth = gsub(pth,trick_2,"") return (gsub(pth,"//+","/")) end +--~ print(file.join("//","/y")) +--~ print(file.join("/","/y")) +--~ print(file.join("","/y")) +--~ print(file.join("/x/","/y")) +--~ print(file.join("x/","/y")) +--~ print(file.join("http://","/y")) +--~ print(file.join("http://a","/y")) +--~ print(file.join("http:///a","/y")) +--~ print(file.join("//nas-1","/y")) + function file.iswritable(name) local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,".")) - return a and a.permissions:sub(2,2) == "w" + return a and sub(a.permissions,2,2) == "w" end function file.isreadable(name) local a = lfs.attributes(name) - return a and a.permissions:sub(1,1) == "r" + return a and sub(a.permissions,1,1) == "r" end file.is_readable = file.isreadable @@ -1758,36 +2140,50 @@ file.is_writable = file.iswritable -- todo: lpeg -function file.split_path(str) - local t = { } - str = gsub(str,"\\", "/") - str = gsub(str,"(%a):([;/])", "%1\001%2") - for name in gmatch(str,"([^;:]+)") do - if name ~= "" then - t[#t+1] = gsub(name,"\001",":") - end - end - return t +--~ function file.split_path(str) +--~ local t = { } +--~ str = gsub(str,"\\", "/") +--~ str = gsub(str,"(%a):([;/])", "%1\001%2") +--~ for name in gmatch(str,"([^;:]+)") do +--~ if name ~= "" then +--~ t[#t+1] = gsub(name,"\001",":") +--~ end +--~ end +--~ return t +--~ end + +local checkedsplit = string.checkedsplit + +function file.split_path(str,separator) + str = gsub(str,"\\","/") + return checkedsplit(str,separator or io.pathseparator) end function file.join_path(tab) return concat(tab,io.pathseparator) -- can have trailing // end +-- we can hash them weakly + function file.collapse_path(str) - str = gsub(str,"/%./","/") - local n, m = 1, 1 - while n > 0 or m > 0 do - str, n = gsub(str,"[^/%.]+/%.%.$","") - str, m = gsub(str,"[^/%.]+/%.%./","") - end - str = gsub(str,"([^/])/$","%1") - str = gsub(str,"^%./","") - str = gsub(str,"/%.$","") + str = gsub(str,"\\","/") + if find(str,"/") then + str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified + str = gsub(str,"/%./","/") + local n, m = 1, 1 + while n > 0 or m > 0 do + str, n = gsub(str,"[^/%.]+/%.%.$","") + str, m = gsub(str,"[^/%.]+/%.%./","") + end + str = gsub(str,"([^/])/$","%1") + -- str = gsub(str,"^%./","") -- ./xx in qualified + str = gsub(str,"/%.$","") + end if str == "" then str = "." 
end return str end +--~ print(file.collapse_path("/a")) --~ print(file.collapse_path("a/./b/..")) --~ print(file.collapse_path("a/aa/../b/bb")) --~ print(file.collapse_path("a/../..")) @@ -1817,27 +2213,27 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1 --~ function file.extname(name) ---~ return pattern:match(name) or "" +--~ return lpegmatch(pattern,name) or "" --~ end --~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1) --~ function file.removesuffix(name) ---~ return pattern:match(name) +--~ return lpegmatch(pattern,name) --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1 --~ function file.basename(name) ---~ return pattern:match(name) or name +--~ return lpegmatch(pattern,name) or name --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1 --~ function file.dirname(name) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) +--~ return sub(name,1,p-2) --~ else --~ return "" --~ end @@ -1846,7 +2242,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.addsuffix(name, suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then --~ return name --~ else @@ -1857,9 +2253,9 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.replacesuffix(name,suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) .. "." .. suffix +--~ return sub(name,1,p-2) .. "." .. suffix --~ else --~ return name .. "." .. suffix --~ end @@ -1868,11 +2264,11 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1 --~ function file.nameonly(name) ---~ local a, b = pattern:match(name) +--~ local a, b = lpegmatch(pattern,name) --~ if b then ---~ return name:sub(a,b-2) +--~ return sub(name,a,b-2) --~ elseif a then ---~ return name:sub(a) +--~ return sub(name,a) --~ else --~ return name --~ end @@ -1906,11 +2302,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":") -- ./name ../name /name c: :// name/name function file.is_qualified_path(filename) - return qualified:match(filename) + return lpegmatch(qualified,filename) ~= nil end function file.is_rootbased_path(filename) - return rootbased:match(filename) + return lpegmatch(rootbased,filename) ~= nil end local slash = lpeg.S("\\/") @@ -1923,16 +2319,25 @@ local base = lpeg.C((1-suffix)^0) local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc("")) function file.splitname(str) -- returns drive, path, base, suffix - return pattern:match(str) + return lpegmatch(pattern,str) end --- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end -- -- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } -- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } -- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } -- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } +--~ -- todo: +--~ +--~ if os.type == "windows" then +--~ local currentdir = lfs.currentdir +--~ function lfs.currentdir() +--~ return 
(gsub(currentdir(),"\\","/")) +--~ end +--~ end + end -- of closure @@ -1997,7 +2402,7 @@ end function file.loadchecksum(name) if md5 then local data = io.loaddata(name .. ".md5") - return data and data:gsub("%s","") + return data and (gsub(data,"%s","")) end return nil end @@ -2018,14 +2423,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-url'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local char, gmatch = string.char, string.gmatch +local char, gmatch, gsub = string.char, string.gmatch, string.gsub local tonumber, type = tonumber, type +local lpegmatch = lpeg.match -- from the spec (on the web): -- @@ -2049,7 +2455,9 @@ local hexdigit = lpeg.R("09","AF","af") local plus = lpeg.P("+") local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar) -local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^0) * colon + lpeg.Cc("") +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) + +local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("") local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("") local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("") local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("") @@ -2057,25 +2465,51 @@ local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0 local parser = lpeg.Ct(scheme * authority * path * query * fragment) +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + function url.split(str) - return (type(str) == "string" and parser:match(str)) or str + return (type(str) == "string" and lpegmatch(parser,str)) or str end +-- todo: cache them + function url.hashed(str) local s = url.split(str) + local somescheme = s[1] ~= "" return { - scheme = (s[1] ~= "" and s[1]) or "file", + scheme = (somescheme and s[1]) or "file", authority = s[2], - path = s[3], - query = s[4], - fragment = s[5], - original = str + path = s[3], + query = s[4], + fragment = s[5], + original = str, + noscheme = not somescheme, } end +function url.hasscheme(str) + return url.split(str)[1] ~= "" +end + +function url.addscheme(str,scheme) + return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str) +end + +function url.construct(hash) + local fullurl = hash.sheme .. "://".. hash.authority .. hash.path + if hash.query then + fullurl = fullurl .. "?".. hash.query + end + if hash.fragment then + fullurl = fullurl .. "?".. 
hash.fragment + end + return fullurl +end + function url.filename(filename) local t = url.hashed(filename) - return (t.scheme == "file" and t.path:gsub("^/([a-zA-Z])([:|])/)","%1:")) or filename + return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end function url.query(str) @@ -2129,24 +2563,59 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-dir'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- dir.expand_name will be merged with cleanpath and collapsepath + local type = type -local find, gmatch = string.find, string.gmatch +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local lpegmatch = lpeg.match dir = dir or { } +-- handy + +function dir.current() + return (gsub(lfs.currentdir(),"\\","/")) +end + -- optimizing for no string.find (*) does not save time local attributes = lfs.attributes local walkdir = lfs.dir -local function glob_pattern(path,patt,recurse,action) +local function glob_pattern(path,patt,recurse,action) + local ok, scanner + if path == "/" then + ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner do + local full = path .. name + local mode = attributes(full,'mode') + if mode == 'file' then + if find(full,patt) then + action(full) + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + glob_pattern(full,patt,recurse,action) + end + end + end +end + +dir.glob_pattern = glob_pattern + +local function collect_pattern(path,patt,recurse,result) local ok, scanner + result = result or { } if path == "/" then ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe else @@ -2156,19 +2625,22 @@ local function glob_pattern(path,patt,recurse,action) if not find(path,"/$") then path = path .. '/' end for name in scanner do local full = path .. name - local mode = attributes(full,'mode') + local attr = attributes(full) + local mode = attr.mode if mode == 'file' then if find(full,patt) then - action(full) + result[name] = attr end elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - glob_pattern(full,patt,recurse,action) + attr.list = collect_pattern(full,patt,recurse) + result[name] = attr end end end + return result end -dir.glob_pattern = glob_pattern +dir.collect_pattern = collect_pattern local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V @@ -2189,29 +2661,48 @@ local filter = Cs ( ( )^0 ) local function glob(str,t) - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif lfs.isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. 
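-- sketch of the new collect_pattern above: unlike glob_pattern it does not
-- invoke an action but returns a table of lfs attribute tables, with
-- subdirectories (when recursing) nested in a .list field; the path and
-- file names below are made up

--~ local t = dir.collect_pattern("/opt/texmf/","%.lua$",true)
--~ -- t["foo.lua"] = { mode = "file", size = 1234, ... }
--~ -- t["somedir"] = { mode = "directory", list = { ... }, ... }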
base) + glob_pattern(start,result,recurse,t) + end end - return t - elseif lfs.isfile(str) then - local t = t or { } - t[#t+1] = str - return t else - local split = pattern:match(str) - if split then + if type(str) == "table" then local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = filter:match(start .. base) - glob_pattern(start,result,recurse,action) + for s=1,#str do + glob(str[s],t) + end + return t + elseif lfs.isfile(str) then + local t = t or { } + t[#t+1] = str return t else - return { } + local split = lpegmatch(pattern,str) + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,action) + return t + else + return { } + end end end end @@ -2273,11 +2764,12 @@ end local make_indeed = true -- false -if string.find(os.getenv("PATH"),";") then +if string.find(os.getenv("PATH"),";") then -- os.type == "windows" function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2288,13 +2780,13 @@ if string.find(os.getenv("PATH"),";") then end local first, middle, last local drive = false - first, middle, last = str:match("^(//)(//*)(.*)$") + first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else - first, last = str:match("^(//)/*(.-)$") + first, last = match(str,"^(//)/*(.-)$") if first then - middle, last = str:match("([^/]+)/+(.-)$") + middle, last = match(str,"([^/]+)/+(.-)$") if middle then pth = "//" .. middle else @@ -2302,11 +2794,11 @@ if string.find(os.getenv("PATH"),";") then last = "" end else - first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$") + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") if first then pth, drive = first .. middle, true else - middle, last = str:match("^(/*)(.-)$") + middle, last = match(str,"^(/*)(.-)$") if not middle then last = str end @@ -2340,34 +2832,31 @@ if string.find(os.getenv("PATH"),";") then --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) - local first, nothing, last = str:match("^(//)(//*)(.*)$") + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") if first then - first = lfs.currentdir() .. "/" - first = first:gsub("\\","/") + first = dir.current() .. 
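-- usage sketch for the reworked glob above: the second argument can now be a
-- callback as well as a collecting table; this assumes the function is
-- exported as dir.glob further on, as in the original file

--~ local found = dir.glob("*.tex")       -- returns a table of matching names
--~
--~ dir.glob("**/*.lua", function(name)   -- a "**" in the base part triggers
--~     print(name)                       -- recursion into subdirectories
--~ end)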
"/" end if not first then - first, last = str:match("^(//)/*(.*)$") + first, last = match(str,"^(//)/*(.*)$") end if not first then - first, last = str:match("^([a-zA-Z]:)(.*)$") + first, last = match(str,"^([a-zA-Z]:)(.*)$") if first and not find(last,"^/") then local d = lfs.currentdir() if lfs.chdir(first) then - first = lfs.currentdir() - first = first:gsub("\\","/") + first = dir.current() end lfs.chdir(d) end end if not first then - first, last = lfs.currentdir(), str - first = first:gsub("\\","/") + first, last = dir.current(), str end - last = last:gsub("//","/") - last = last:gsub("/%./","/") - last = last:gsub("^/*","") - first = first:gsub("/*$","") + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") if last == "" then return first else @@ -2378,8 +2867,9 @@ if string.find(os.getenv("PATH"),";") then else function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2388,7 +2878,7 @@ else end end end - str = str:gsub("/+","/") + str = gsub(str,"/+","/") if find(str,"^/") then pth = "/" for s in gmatch(str,"[^/]+") do @@ -2422,12 +2912,12 @@ else --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath if not find(str,"^/") then str = lfs.currentdir() .. "/" .. str end - str = str:gsub("//","/") - str = str:gsub("/%./","/") + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") return str end @@ -2442,7 +2932,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-boolean'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2503,19 +2993,40 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-unicode'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +if not unicode then + + unicode = { utf8 = { } } + + local floor, char = math.floor, string.char + + function unicode.utf8.utfchar(n) + if n < 0x80 then + return char(n) + elseif n < 0x800 then + return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40)) + elseif n < 0x10000 then + return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + elseif n < 0x40000 then + return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + else -- wrong: + -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + return "?" 
+ end + end + +end + utf = utf or unicode.utf8 local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs -unicode = unicode or { } - -- 0 EF BB BF UTF-8 -- 1 FF FE UTF-16-little-endian -- 2 FE FF UTF-16-big-endian @@ -2530,14 +3041,20 @@ unicode.utfname = { [4] = 'utf-32-be' } -function unicode.utftype(f) -- \000 fails ! +-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated + +function unicode.utftype(f) local str = f:read(4) if not str then f:seek('set') return 0 - elseif find(str,"^%z%z\254\255") then + -- elseif find(str,"^%z%z\254\255") then -- depricated + -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged + elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH) return 4 - elseif find(str,"^\255\254%z%z") then + -- elseif find(str,"^\255\254%z%z") then -- depricated + -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged + elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH) return 3 elseif find(str,"^\254\255") then f:seek('set',2) @@ -2681,7 +3198,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-math'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2728,7 +3245,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-utils'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2736,6 +3253,10 @@ if not modules then modules = { } end modules ['l-utils'] = { -- hm, quite unreadable +local gsub = string.gsub +local concat = table.concat +local type, next = type, next + if not utils then utils = { } end if not utils.merger then utils.merger = { } end if not utils.lua then utils.lua = { } end @@ -2773,7 +3294,7 @@ function utils.merger._self_load_(name) end if data and utils.merger.strip_comment then -- saves some 20K - data = data:gsub("%-%-~[^\n\r]*[\r\n]", "") + data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "") end return data or "" end @@ -2791,7 +3312,7 @@ end function utils.merger._self_swap_(data,code) if data ~= "" then - return (data:gsub(utils.merger.pattern, function(s) + return (gsub(data,utils.merger.pattern, function(s) return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. 
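-- a worked example for the utfchar fallback above, assuming the usual utf-8
-- layout: U+20AC (the euro sign) falls in the three byte branch, so
--
--   0xE0 + floor(0x20AC/0x1000)        = 0xE2
--   0x80 + (floor(0x20AC/0x40) % 0x40) = 0x82
--   0x80 + (0x20AC % 0x40)             = 0xAC
--
-- and unicode.utf8.utfchar(0x20AC) returns the byte sequence E2 82 AC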
"\n\n" end, 1)) else @@ -2801,8 +3322,8 @@ end --~ stripper: --~ ---~ data = string.gsub(data,"%-%-~[^\n]*\n","") ---~ data = string.gsub(data,"\n\n+","\n") +--~ data = gsub(data,"%-%-~[^\n]*\n","") +--~ data = gsub(data,"\n\n+","\n") function utils.merger._self_libs_(libs,list) local result, f, frozen = { }, nil, false @@ -2810,9 +3331,10 @@ function utils.merger._self_libs_(libs,list) if type(libs) == 'string' then libs = { libs } end if type(list) == 'string' then list = { list } end local foundpath = nil - for _, lib in ipairs(libs) do - for _, pth in ipairs(list) do - pth = string.gsub(pth,"\\","/") -- file.clean_path + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path utils.report("checking library path %s",pth) local name = pth .. "/" .. lib if lfs.isfile(name) then @@ -2824,7 +3346,8 @@ function utils.merger._self_libs_(libs,list) if foundpath then utils.report("using library path %s",foundpath) local right, wrong = { }, { } - for _, lib in ipairs(libs) do + for i=1,#libs do + local lib = libs[i] local fullname = foundpath .. "/" .. lib if lfs.isfile(fullname) then -- right[#right+1] = lib @@ -2838,15 +3361,15 @@ function utils.merger._self_libs_(libs,list) end end if #right > 0 then - utils.report("merged libraries: %s",table.concat(right," ")) + utils.report("merged libraries: %s",concat(right," ")) end if #wrong > 0 then - utils.report("skipped libraries: %s",table.concat(wrong," ")) + utils.report("skipped libraries: %s",concat(wrong," ")) end else utils.report("no valid library path found") end - return table.concat(result, "\n\n") + return concat(result, "\n\n") end function utils.merger.selfcreate(libs,list,target) @@ -2904,16 +3427,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. 
end + aux = aux or { } local concat, format, gmatch = table.concat, string.format, string.gmatch local tostring, type = tostring, type +local lpegmatch = lpeg.match + +local P, R, V = lpeg.P, lpeg.R, lpeg.V + +local escape, left, right = P("\\"), P('{'), P('}') + +lpeg.patterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} local space = lpeg.P(' ') local equal = lpeg.P("=") @@ -2921,7 +3456,7 @@ local comma = lpeg.P(",") local lbrace = lpeg.P("{") local rbrace = lpeg.P("}") local nobrace = 1 - (lbrace+rbrace) -local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace } +local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace } local spaces = space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) @@ -2959,13 +3494,13 @@ function aux.make_settings_to_hash_pattern(set,how) end end -function aux.settings_to_hash(str) +function aux.settings_to_hash(str,existing) if str and str ~= "" then - hash = { } + hash = existing or { } if moretolerant then - pattern_b_s:match(str) + lpegmatch(pattern_b_s,str) else - pattern_a_s:match(str) + lpegmatch(pattern_a_s,str) end return hash else @@ -2973,39 +3508,41 @@ function aux.settings_to_hash(str) end end -function aux.settings_to_hash_tolerant(str) +function aux.settings_to_hash_tolerant(str,existing) if str and str ~= "" then - hash = { } - pattern_b_s:match(str) + hash = existing or { } + lpegmatch(pattern_b_s,str) return hash else return { } end end -function aux.settings_to_hash_strict(str) +function aux.settings_to_hash_strict(str,existing) if str and str ~= "" then - hash = { } - pattern_c_s:match(str) + hash = existing or { } + lpegmatch(pattern_c_s,str) return next(hash) and hash else return nil end end -local seperator = comma * space^0 +local separator = comma * space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) -local pattern = lpeg.Ct(value*(seperator*value)^0) +local pattern = lpeg.Ct(value*(separator*value)^0) -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored aux.settings_to_array_pattern = pattern +-- we could use a weak table as cache + function aux.settings_to_array(str) if not str or str == "" then return { } else - return pattern:match(str) + return lpegmatch(pattern,str) end end @@ -3014,10 +3551,10 @@ local function set(t,v) end local value = lpeg.P(lpeg.Carg(1)*value) / set -local pattern = value*(seperator*value)^0 * lpeg.Carg(1) +local pattern = value*(separator*value)^0 * lpeg.Carg(1) function aux.add_settings_to_array(t,str) - return pattern:match(str, nil, t) + return lpegmatch(pattern,str,nil,t) end function aux.hash_to_string(h,separator,yes,no,strict,omit) @@ -3065,6 +3602,13 @@ function aux.settings_to_set(str,t) return t end +local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace +local pattern = lpeg.Ct((space + value)^0) + +function aux.arguments_to_table(str) + return lpegmatch(pattern,str) +end + -- temporary here function aux.getparameters(self,class,parentclass,settings) @@ -3073,36 +3617,31 @@ function aux.getparameters(self,class,parentclass,settings) sc = table.clone(self[parent]) self[class] = sc end - aux.add_settings_to_array(sc, settings) + aux.settings_to_hash(settings,sc) end -- temporary here -local digit = lpeg.R("09") -local period = lpeg.P(".") -local zero = lpeg.P("0") - ---~ local finish = lpeg.P(-1) ---~ local nodigit = (1-digit) + finish ---~ local case_1 = (period * zero^1 * #nodigit)/"" 
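-- usage sketch for the key/value parsers above; the new optional second
-- argument lets callers accumulate into an existing table, which is what
-- aux.getparameters now relies on; keys and values below are made up

--~ local h = aux.settings_to_hash("width=10pt,style={bold,slanted}")
--~ -- h.width "10pt", h.style "bold,slanted" (outer braces are stripped)
--~
--~ local t = aux.settings_to_array("aap, {noot}, mies")
--~ -- t = { "aap", "noot", "mies" }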
-- .000 ---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100 - +local digit = lpeg.R("09") +local period = lpeg.P(".") +local zero = lpeg.P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") - -local number = digit^1 * (case_1 + case_2) -local stripper = lpeg.Cs((number + 1)^0) +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = lpeg.Cs((number + 1)^0) --~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --~ collectgarbage("collect") --~ str = string.rep(sample,10000) --~ local ts = os.clock() ---~ stripper:match(str) ---~ print(#str, os.clock()-ts, stripper:match(sample)) +--~ lpegmatch(stripper,str) +--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +lpeg.patterns.strip_zeros = stripper function aux.strip_zeros(str) - return stripper:match(str) + return lpegmatch(stripper,str) end function aux.definetable(target) -- defines undefined tables @@ -3126,6 +3665,24 @@ function aux.accesstable(target) return t end +-- as we use this a lot ... + +--~ function aux.cachefunction(action,weak) +--~ local cache = { } +--~ if weak then +--~ setmetatable(cache, { __mode = "kv" } ) +--~ end +--~ local function reminder(str) +--~ local found = cache[str] +--~ if not found then +--~ found = action(str) +--~ cache[str] = found +--~ end +--~ return found +--~ end +--~ return reminder, cache +--~ end + end -- of closure @@ -3133,7 +3690,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-tra'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-tra.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3143,12 +3700,17 @@ if not modules then modules = { } end modules ['trac-tra'] = { -- bound to a variable, like node.new, node.copy etc (contrary to for instance -- node.has_attribute which is bound to a has_attribute local variable in mkiv) +local debug = require "debug" + +local getinfo = debug.getinfo +local type, next = type, next +local concat = table.concat +local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub + debugger = debugger or { } local counters = { } local names = { } -local getinfo = debug.getinfo -local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch -- one @@ -3187,10 +3749,10 @@ function debugger.showstats(printer,threshold) local total, grandtotal, functions = 0, 0, 0 printer("\n") -- ugly but ok -- table.sort(counters) - for func, count in pairs(counters) do + for func, count in next, counters do if count > threshold then local name = getname(func) - if not name:find("for generator") then + if not find(name,"for generator") then printer(format("%8i %s", count, name)) total = total + count end @@ -3222,7 +3784,7 @@ end --~ local total, grandtotal, functions = 0, 0, 0 --~ printer("\n") -- ugly but ok --~ -- table.sort(counters) ---~ for func, count in pairs(counters) do +--~ for func, count in next, counters do --~ if count > threshold then --~ printer(format("%8i %s", count, func)) --~ total = total + count @@ 
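-- worked examples for the zero stripper above (they can be checked against
-- the commented benchmark sample):
--
--   aux.strip_zeros("11.00")    -- "11"    case_1: the whole fraction is zeros
--   aux.strip_zeros("0.1100")   -- "0.11"  case_2: only trailing zeros go
--   aux.strip_zeros("1.00100")  -- "1.001"
--   aux.strip_zeros("0.00")     -- "0"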
-3276,38 +3838,77 @@ end --~ print("") --~ debugger.showstats(print,3) -trackers = trackers or { } +setters = setters or { } +setters.data = setters.data or { } -local data, done = { }, { } +--~ local function set(t,what,value) +--~ local data, done = t.data, t.done +--~ if type(what) == "string" then +--~ what = aux.settings_to_array(what) -- inefficient but ok +--~ end +--~ for i=1,#what do +--~ local w = what[i] +--~ for d, f in next, data do +--~ if done[d] then +--~ -- prevent recursion due to wildcards +--~ elseif find(d,w) then +--~ done[d] = true +--~ for i=1,#f do +--~ f[i](value) +--~ end +--~ end +--~ end +--~ end +--~ end -local function set(what,value) +local function set(t,what,value) + local data, done = t.data, t.done if type(what) == "string" then - what = aux.settings_to_array(what) + what = aux.settings_to_hash(what) -- inefficient but ok end - for i=1,#what do - local w = what[i] + for w, v in next, what do + if v == "" then + v = value + else + v = toboolean(v) + end for d, f in next, data do if done[d] then -- prevent recursion due to wildcards elseif find(d,w) then done[d] = true for i=1,#f do - f[i](value) + f[i](v) end end end end end -local function reset() - for d, f in next, data do +local function reset(t) + for d, f in next, t.data do for i=1,#f do f[i](false) end end end -function trackers.register(what,...) +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data what = lower(what) local w = data[what] if not w then @@ -3319,32 +3920,32 @@ function trackers.register(what,...) if typ == "function" then w[#w+1] = fnc elseif typ == "string" then - w[#w+1] = function(value) set(fnc,value,nesting) end + w[#w+1] = function(value) set(t,fnc,value,nesting) end end end end -function trackers.enable(what) - done = { } - set(what,true) +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,string.simpleesc(tostring(what))) + t.enable, t.done = e, { } end -function trackers.disable(what) - done = { } - if not what or what == "" then - trackers.reset(what) - else - set(what,false) - end +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,string.simpleesc(tostring(what))) + t.disable, t.done = e, { } end -function trackers.reset(what) - done = { } - reset() +function setters.reset(t) + t.done = { } + reset(t) end -function trackers.list() -- pattern - local list = table.sortedkeys(data) +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) local user, system = { }, { } for l=1,#list do local what = list[l] @@ -3357,6 +3958,78 @@ function trackers.list() -- pattern return user, system end +function setters.show(t) + commands.writestatus("","") + local list = setters.list(t) + for k=1,#list do + commands.writestatus(t.name,list[k]) + end + commands.writestatus("","") +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module + +function setters.new(name) + local t + t = { + data = { }, + name = name, + enable = function(...) setters.enable (t,...) end, + disable = function(...) setters.disable (t,...) end, + register = function(...) setters.register(t,...) end, + list = function(...) setters.list (t,...) 
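-- sketch of the generalized setter mechanism above: trackers, directives and
-- experiments now share one implementation; "mymodule.detail" is a made up key

--~ local trace_detail = false
--~
--~ trackers.register("mymodule.detail", function(v) trace_detail = v end)
--~
--~ trackers.enable ("mymodule.detail")   -- trace_detail becomes true
--~ trackers.disable("mymodule.detail")   -- trace_detail becomes false
--~
--~ -- enable and disable also accept comma separated lists; requested keys are
--~ -- matched against the registered ones with find (see set above)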
end, + show = function(...) setters.show (t,...) end, + } + setters.data[name] = t + return t +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +local e = directives.enable +local d = directives.disable + +function directives.enable(...) + commands.writestatus("directives","enabling: %s",concat({...}," ")) + e(...) +end + +function directives.disable(...) + commands.writestatus("directives","disabling: %s",concat({...}," ")) + d(...) +end + +local e = experiments.enable +local d = experiments.disable + +function experiments.enable(...) + commands.writestatus("experiments","enabling: %s",concat({...}," ")) + e(...) +end + +function experiments.disable(...) + commands.writestatus("experiments","disabling: %s",concat({...}," ")) + d(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + statistics.enable = not v +end) + + end -- of closure @@ -3364,7 +4037,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['luat-env'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3376,10 +4049,10 @@ if not modules then modules = { } end modules ['luat-env'] = { -- evolved before bytecode arrays were available and so a lot of -- code has disappeared already. -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local format = string.format +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquote, quote = string.unquote, string.quote -- precautions @@ -3413,13 +4086,14 @@ if not environment.jobname then environ function environment.initialize_arguments(arg) local arguments, files = { }, { } environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index, argument in pairs(arg) do + for index=1,#arg do + local argument = arg[index] if index > 0 then - local flag, value = argument:match("^%-+(.+)=(.-)$") + local flag, value = match(argument,"^%-+(.-)=(.-)$") if flag then - arguments[flag] = string.unquote(value or "") + arguments[flag] = unquote(value or "") else - flag = argument:match("^%-+(.+)") + flag = match(argument,"^%-+(.+)") if flag then arguments[flag] = true else @@ -3446,25 +4120,30 @@ function environment.argument(name,partial) return arguments[name] elseif partial then if not sortedflags then - sortedflags = { } - for _,v in pairs(table.sortedkeys(arguments)) do - sortedflags[#sortedflags+1] = "^" .. v + sortedflags = table.sortedkeys(arguments) + for k=1,#sortedflags do + sortedflags[k] = "^" .. 
sortedflags[k] end environment.sortedflags = sortedflags end -- example of potential clash: ^mode ^modefile - for _,v in ipairs(sortedflags) do - if name:find(v) then - return arguments[v:sub(2,#v)] + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] end end end return nil end +environment.argument("x",true) + function environment.split_arguments(separator) -- rather special, cut-off before separator local done, before, after = false, { }, { } - for _,v in ipairs(environment.original_arguments) do + local original_arguments = environment.original_arguments + for k=1,#original_arguments do + local v = original_arguments[k] if not done and v == separator then done = true elseif done then @@ -3481,16 +4160,17 @@ function environment.reconstruct_commandline(arg,noquote) if noquote and #arg == 1 then local a = arg[1] a = resolvers.resolve(a) - a = a:unquote() + a = unquote(a) return a - elseif next(arg) then + elseif #arg > 0 then local result = { } - for _,a in ipairs(arg) do -- ipairs 1 .. #n + for i=1,#arg do + local a = arg[i] a = resolvers.resolve(a) - a = a:unquote() - a = a:gsub('"','\\"') -- tricky - if a:find(" ") then - result[#result+1] = a:quote() + a = unquote(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quote(a) else result[#result+1] = a end @@ -3503,17 +4183,18 @@ end if arg then - -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later) + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) local newarg, instring = { }, false - for index, argument in ipairs(arg) do - if argument:find("^\"") then - newarg[#newarg+1] = argument:gsub("^\"","") - if not argument:find("\"$") then + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then instring = true end - elseif argument:find("\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","") + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") instring = false elseif instring then newarg[#newarg] = newarg[#newarg] .. " " .. 
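-- sketch of the flag parsing above (the command line itself is made up):
--
--   mtxrun --script fonts --list --pattern=lm
--
-- ends up as
--
--   environment.arguments = { script = true, list = true, pattern = "lm" }
--   environment.files     = { "fonts" }
--
-- and with partial matching a stored flag only has to be a prefix of the
-- requested name, hence the ^mode / ^modefile clash noted above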
argument @@ -3568,12 +4249,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in filename = file.replacesuffix(filename, "lua") local fullname = environment.luafile(filename) if fullname and fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading file %s", fullname) end return environment.loadedluacode(fullname) else - if trace_verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end return nil @@ -3593,7 +4274,7 @@ function environment.loadluafile(filename, version) -- when not overloaded by explicit suffix we look for a luc file first local fullname = (lucname and environment.luafile(lucname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check @@ -3611,7 +4292,7 @@ function environment.loadluafile(filename, version) if v == version then return true else - if trace_verbose then + if trace_locating then logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version) end environment.loadluafile(filename) @@ -3622,12 +4303,12 @@ function environment.loadluafile(filename, version) end fullname = (luaname and environment.luafile(luaname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check if not chunk then - if verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end else @@ -3645,7 +4326,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-inf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-inf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3670,6 +4351,14 @@ function statistics.hastimer(instance) return instance and instance.starttime end +function statistics.resettiming(instance) + if not instance then + notimer = { timing = 0, loadtime = 0 } + else + instance.timing, instance.loadtime = 0, 0 + end +end + function statistics.starttiming(instance) if not instance then notimer = { } @@ -3684,6 +4373,8 @@ function statistics.starttiming(instance) if not instance.loadtime then instance.loadtime = 0 end + else +--~ logs.report("system","nested timing (%s)",tostring(instance)) end instance.timing = it + 1 end @@ -3729,6 +4420,12 @@ function statistics.elapsedindeed(instance) return t > statistics.threshold end +function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds + if statistics.elapsedindeed(instance) then + return format("%s seconds %s", statistics.elapsedtime(instance),rest or "") + end +end + -- general function function statistics.register(tag,fnc) @@ -3807,14 +4504,32 @@ function statistics.timed(action,report) report("total runtime: %s",statistics.elapsedtime(timer)) end +-- where, not really the best spot for this: + +commands = commands or { } + +local timer + +function commands.resettimer() + statistics.resettiming(timer) + statistics.starttiming(timer) +end + +function commands.elapsedtime() + statistics.stoptiming(timer) + tex.sprint(statistics.elapsedtime(timer)) +end + +commands.resettimer() + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } 
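-- usage sketch for the timer helpers above; "myjob" is just a placeholder table

--~ local myjob = { }
--~
--~ statistics.starttiming(myjob)
--~ -- ... some work ...
--~ statistics.stoptiming (myjob)
--~
--~ print(statistics.elapsedtime(myjob))                 -- e.g. 0.123
--~ print(statistics.elapsedseconds(myjob,"(loading)"))  -- e.g. 0.123 seconds (loading)
--~
--~ -- elapsedseconds returns nil when the elapsed time stays below
--~ -- statistics.threshold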
end modules ['luat-log'] = { +if not modules then modules = { } end modules ['trac-log'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-log.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3822,7 +4537,11 @@ if not modules then modules = { } end modules ['luat-log'] = { -- this is old code that needs an overhaul -local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format +--~ io.stdout:setvbuf("no") +--~ io.stderr:setvbuf("no") + +local write_nl, write = texio.write_nl or print, texio.write or io.write +local format, gmatch = string.format, string.gmatch local texcount = tex and tex.count if texlua then @@ -3903,25 +4622,48 @@ function logs.tex.line(fmt,...) -- new end end +--~ function logs.tex.start_page_number() +--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +--~ if real > 0 then +--~ if user > 0 then +--~ if sub > 0 then +--~ write(format("[%s.%s.%s",real,user,sub)) +--~ else +--~ write(format("[%s.%s",real,user)) +--~ end +--~ else +--~ write(format("[%s",real)) +--~ end +--~ else +--~ write("[-") +--~ end +--~ end + +--~ function logs.tex.stop_page_number() +--~ write("]") +--~ end + +local real, user, sub + function logs.tex.start_page_number() - local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +end + +function logs.tex.stop_page_number() if real > 0 then if user > 0 then if sub > 0 then - write(format("[%s.%s.%s",real,user,sub)) + logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub) else - write(format("[%s.%s",real,user)) + logs.report("pages", "flushing realpage %s, userpage %s",real,user) end else - write(format("[%s",real)) + logs.report("pages", "flushing realpage %s",real) end else - write("[-") + logs.report("pages", "flushing page") end -end - -function logs.tex.stop_page_number() - write("]") + io.flush() end logs.tex.report_job_stat = statistics.show_job_stat @@ -4021,7 +4763,7 @@ end function logs.setprogram(_name_,_banner_,_verbose_) name, banner = _name_, _banner_ if _verbose_ then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") end logs.set_method("tex") logs.report = report -- also used in libraries @@ -4034,9 +4776,9 @@ end function logs.setverbose(what) if what then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") else - trackers.disable("resolvers.verbose") + trackers.disable("resolvers.locating") end logs.verbose = what or false end @@ -4053,7 +4795,7 @@ logs.report = logs.tex.report logs.simple = logs.tex.report function logs.reportlines(str) -- todo: - for line in str:gmatch("(.-)[\n\r]") do + for line in gmatch(str,"(.-)[\n\r]") do logs.report(line) end end @@ -4064,8 +4806,12 @@ end logs.simpleline = logs.reportline -function logs.help(message,option) +function logs.reportbanner() -- for scripts too logs.report(banner) +end + +function logs.help(message,option) + logs.reportbanner() logs.reportline() logs.reportlines(message) local moreinfo = logs.moreinfo or "" @@ -4097,6 +4843,11 @@ end --~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --~ end +function logs.fatal(where,...) 
+ logs.report(where,"fatal error: %s, aborting now",format(...)) + os.exit() +end + end -- of closure @@ -4104,10 +4855,10 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", - comment = "companion to luat-lib.tex", } -- After a few years using the code the large luat-inp.lua file @@ -4119,7 +4870,7 @@ if not modules then modules = { } end modules ['data-inp'] = { -- * some public auxiliary functions were made private -- -- TODO: os.getenv -> os.env[] --- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller) +-- TODO: instances.[hashes,cnffiles,configurations,522] -- TODO: check escaping in find etc, too much, too slow -- This lib is multi-purpose and can be loaded again later on so that @@ -4140,12 +4891,13 @@ if not modules then modules = { } end modules ['data-inp'] = { local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type = next, type +local lpegmatch = lpeg.match -local trace_locating, trace_detail, trace_verbose = false, false, false +local trace_locating, trace_detail, trace_expansions = false, false, false -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) -trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end) +trackers.register("resolvers.locating", function(v) trace_locating = v end) +trackers.register("resolvers.details", function(v) trace_detail = v end) +trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo if not resolvers then resolvers = { @@ -4169,7 +4921,7 @@ resolvers.generators.notfound = { nil } resolvers.cacheversion = '1.0.1' resolvers.cnfname = 'texmf.cnf' resolvers.luaname = 'texmfcnf.lua' -resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' +resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}' local dummy_path_expr = "^!*unset/*$" @@ -4211,8 +4963,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' } alternatives['map files'] = 'map' alternatives['enc files'] = 'enc' -alternatives['cid files'] = 'cid' -alternatives['fea files'] = 'fea' +alternatives['cid maps'] = 'cid' -- great, why no cid files +alternatives['font feature files'] = 'fea' -- and fea files here alternatives['opentype fonts'] = 'otf' alternatives['truetype fonts'] = 'ttf' alternatives['truetype collections'] = 'ttc' @@ -4228,6 +4980,11 @@ formats ['sfd'] = 'SFDFONTS' suffixes ['sfd'] = { 'sfd' } alternatives['subfont definition files'] = 'sfd' +-- lib paths + +formats ['lib'] = 'CLUAINPUTS' -- new (needs checking) +suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' } + -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. 
We used to pass the @@ -4350,8 +5107,10 @@ local function check_configuration() -- not yet ok, no time for debugging now -- bad luck end fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm - fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") - fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- this will go away some day + fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//") end @@ -4366,7 +5125,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' end end -resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE")) +resolvers.settrace(os.getenv("MTX_INPUT_TRACE")) function resolvers.osenv(key) local ie = instance.environment @@ -4454,37 +5213,43 @@ end -- work that well; the parsing is ok, but dealing with the resulting -- table is a pain because we need to work inside-out recursively +local function do_first(a,b) + local t = { } + for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end + return "{" .. concat(t,",") .. "}" +end + +local function do_second(a,b) + local t = { } + for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end + return "{" .. concat(t,",") .. "}" +end + +local function do_both(a,b) + local t = { } + for sa in gmatch(a,"[^,]+") do + for sb in gmatch(b,"[^,]+") do + t[#t+1] = sa .. sb + end + end + return "{" .. concat(t,",") .. "}" +end + +local function do_three(a,b,c) + return a .. b.. c +end + local function splitpathexpr(str, t, validate) -- no need for further optimization as it is only called a - -- few times, we can use lpeg for the sub; we could move - -- the local functions outside the body + -- few times, we can use lpeg for the sub + if trace_expansions then + logs.report("fileio","expanding variable '%s'",str) + end t = t or { } str = gsub(str,",}",",@}") str = gsub(str,"{,","{@,") -- str = "@" .. str .. "@" local ok, done - local function do_first(a,b) - local t = { } - for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end - return "{" .. concat(t,",") .. "}" - end - local function do_second(a,b) - local t = { } - for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end - return "{" .. concat(t,",") .. "}" - end - local function do_both(a,b) - local t = { } - for sa in gmatch(a,"[^,]+") do - for sb in gmatch(b,"[^,]+") do - t[#t+1] = sa .. sb - end - end - return "{" .. concat(t,",") .. "}" - end - local function do_three(a,b,c) - return a .. b.. c - end while true do done = false while true do @@ -4515,6 +5280,11 @@ local function splitpathexpr(str, t, validate) t[#t+1] = s end end + if trace_expansions then + for k=1,#t do + logs.report("fileio","% 4i: %s",k,t[k]) + end + end return t end @@ -4554,18 +5324,27 @@ end -- also we now follow the stupid route: if not set then just assume *one* -- cnf file under texmf (i.e. 
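-- a worked example for the brace expansion above (do_first, do_second,
-- do_both): a specification like
--
--   {$SELFAUTODIR,$SELFAUTOPARENT}{,/texmf}
--
-- expands, before validation, into the four candidates
--
--   $SELFAUTODIR  $SELFAUTODIR/texmf  $SELFAUTOPARENT  $SELFAUTOPARENT/texmf
--
-- and with the resolvers.expansions tracker enabled each candidate is
-- reported by the trace_expansions branch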
distribution) -resolvers.ownpath = resolvers.ownpath or nil -resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex" -resolvers.autoselfdir = true -- false may be handy for debugging +local args = environment and environment.original_arguments or arg -- this needs a cleanup + +resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" +resolvers.ownbin = gsub(resolvers.ownbin,"\\","/") function resolvers.getownpath() - if not resolvers.ownpath then - if resolvers.autoselfdir and os.selfdir then - resolvers.ownpath = os.selfdir - else - local binary = resolvers.ownbin - if os.platform == "windows" then - binary = file.replacesuffix(binary,"exe") + local ownpath = resolvers.ownpath or os.selfdir + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + end + local binary = resolvers.ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and file.dirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) end for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do local b = file.join(p,binary) @@ -4577,30 +5356,39 @@ function resolvers.getownpath() local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() - if trace_verbose and p ~= pp then - logs.report("fileio","following symlink %s to %s",p,pp) + if trace_locating and p ~= pp then + logs.report("fileio","following symlink '%s' to '%s'",p,pp) end - resolvers.ownpath = pp + ownpath = pp lfs.chdir(olddir) else - if trace_verbose then - logs.report("fileio","unable to check path %s",p) + if trace_locating then + logs.report("fileio","unable to check path '%s'",p) end - resolvers.ownpath = p + ownpath = p end break end end end - if not resolvers.ownpath then resolvers.ownpath = '.' end + if not ownpath or ownpath == "" then + ownpath = "." 
+ logs.report("fileio","forcing fallback ownpath .") + elseif trace_locating then + logs.report("fileio","using ownpath '%s'",ownpath) + end + end + resolvers.ownpath = ownpath + function resolvers.getownpath() + return resolvers.ownpath end - return resolvers.ownpath + return ownpath end local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" } local function identify_own() - local ownpath = resolvers.getownpath() or lfs.currentdir() + local ownpath = resolvers.getownpath() or dir.current() local ie = instance.environment if ownpath then if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end @@ -4613,10 +5401,10 @@ local function identify_own() if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end - if trace_verbose then + if trace_locating then for i=1,#own_places do local v = own_places[i] - logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown") + logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown") end end identify_own = function() end @@ -4648,10 +5436,8 @@ end local function load_cnf_file(fname) fname = resolvers.clean_path(fname) local lname = file.replacesuffix(fname,'lua') - local f = io.open(lname) - if f then -- this will go - f:close() - local dname = file.dirname(fname) + if lfs.isfile(lname) then + local dname = file.dirname(fname) -- fname ? if not instance.configuration[dname] then resolvers.load_data(dname,'configuration',lname and file.basename(lname)) instance.order[#instance.order+1] = instance.configuration[dname] @@ -4659,8 +5445,8 @@ local function load_cnf_file(fname) else f = io.open(fname) if f then - if trace_verbose then - logs.report("fileio","loading %s", fname) + if trace_locating then + logs.report("fileio","loading configuration file %s", fname) end local line, data, n, k, v local dname = file.dirname(fname) @@ -4694,14 +5480,16 @@ local function load_cnf_file(fname) end end f:close() - elseif trace_verbose then - logs.report("fileio","skipping %s", fname) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'", fname) end end end local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared) - for _,c in ipairs(instance.order) do + local order = instance.order + for i=1,#order do + local c = order[i] for k,v in next, c do if not instance.variables[k] then if instance.environment[k] then @@ -4717,19 +5505,24 @@ end function resolvers.load_cnf() local function loadoldconfigdata() - for _, fname in ipairs(instance.cnffiles) do - load_cnf_file(fname) + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + load_cnf_file(cnffiles[i]) end end -- instance.cnffiles contain complete names now ! 
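-- sketch of the self locating above (the path is made up): if the binary is
--
--   /opt/texlive/bin/x86_64-linux/luatex
--
-- then, unless they are already set in the environment,
--
--   SELFAUTOLOC    = /opt/texlive/bin/x86_64-linux
--   SELFAUTODIR    = /opt/texlive/bin
--   SELFAUTOPARENT = /opt/texlive
--
-- and TEXMFCNF falls back to resolvers.cnfdefault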
+ -- we still use a funny mix of cnf and new but soon + -- we will switch to lua exclusively as we only use + -- the file to collect the tree roots if #instance.cnffiles == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)") end else - instance.rootpath = instance.cnffiles[1] - for k,fname in ipairs(instance.cnffiles) do - instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local cnffiles = instance.cnffiles + instance.rootpath = cnffiles[1] + for k=1,#cnffiles do + instance.cnffiles[k] = file.collapse_path(cnffiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -4757,8 +5550,9 @@ function resolvers.load_lua() -- yet harmless else instance.rootpath = instance.luafiles[1] - for k,fname in ipairs(instance.luafiles) do - instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local luafiles = instance.luafiles + for k=1,#luafiles do + instance.luafiles[k] = file.collapse_path(luafiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -4790,14 +5584,14 @@ end function resolvers.append_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash append: %s",tag) + logs.report("fileio","hash '%s' appended",tag) end insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } ) end function resolvers.prepend_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash prepend: %s",tag) + logs.report("fileio","hash '%s' prepended",tag) end insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } ) end @@ -4821,9 +5615,11 @@ end -- locators function resolvers.locatelists() - for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do - if trace_verbose then - logs.report("fileio","locating list of %s",path) + local texmfpaths = resolvers.clean_path_list('TEXMF') + for i=1,#texmfpaths do + local path = texmfpaths[i] + if trace_locating then + logs.report("fileio","locating list of '%s'",path) end resolvers.locatedatabase(file.collapse_path(path)) end @@ -4836,11 +5632,11 @@ end function resolvers.locators.tex(specification) if specification and specification ~= '' and lfs.isdir(specification) then if trace_locating then - logs.report("fileio",'! tex locator found: %s',specification) + logs.report("fileio","tex locator '%s' found",specification) end resolvers.append_hash('file',specification,filename) elseif trace_locating then - logs.report("fileio",'? 
tex locator not found: %s',specification) + logs.report("fileio","tex locator '%s' not found",specification) end end @@ -4854,7 +5650,9 @@ function resolvers.loadfiles() instance.loaderror = false instance.files = { } if not instance.renewcache then - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for k=1,#hashes do + local hash = hashes[k] resolvers.hashdatabase(hash.tag,hash.name) if instance.loaderror then break end end @@ -4868,8 +5666,9 @@ end -- generators: function resolvers.loadlists() - for _, hash in ipairs(instance.hashes) do - resolvers.generatedatabase(hash.tag) + local hashes = instance.hashes + for i=1,#hashes do + resolvers.generatedatabase(hashes[i].tag) end end @@ -4881,10 +5680,27 @@ end local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = lpeg.P(" ") +--~ local l_character = lpeg.patterns.utf8 +--~ local l_dangerous = lpeg.P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1) +--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false) + +--~ local function test(str) +--~ print(str,lpeg.match(l_normal,str)) +--~ end +--~ test("ヒラギノ明朝 Pro W3") +--~ test("..ヒラギノ明朝 Pro W3") +--~ test(":ヒラギノ明朝 Pro W3;") +--~ test("ヒラギノ明朝 /Pro W3;") +--~ test("ヒラギノ明朝 Pro W3") + function resolvers.generators.tex(specification) local tag = specification - if trace_verbose then - logs.report("fileio","scanning path %s",specification) + if trace_locating then + logs.report("fileio","scanning path '%s'",specification) end instance.files[tag] = { } local files = instance.files[tag] @@ -4900,7 +5716,8 @@ function resolvers.generators.tex(specification) full = spec end for name in directory(full) do - if not weird:match(name) then + if not lpegmatch(weird,name) then + -- if lpegmatch(l_normal,name) then local mode = attributes(full..name,'mode') if mode == 'file' then if path then @@ -4933,7 +5750,7 @@ function resolvers.generators.tex(specification) end end action() - if trace_verbose then + if trace_locating then logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r) end end @@ -4948,11 +5765,48 @@ end -- we join them and split them after the expansion has taken place. This -- is more convenient. 
+--~ local checkedsplit = string.checkedsplit + +local cache = { } + +local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;"))) + +local function split_kpse_path(str) -- beware, this can be either a path or a {specification} + local found = cache[str] + if not found then + if str == "" then + found = { } + else + str = gsub(str,"\\","/") +--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator) +local split = lpegmatch(splitter,str) + found = { } + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + found[#found+1] = s + end + end + if trace_expansions then + logs.report("fileio","splitting path specification '%s'",str) + for k=1,#found do + logs.report("fileio","% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found +end + +resolvers.split_kpse_path = split_kpse_path + function resolvers.splitconfig() - for i,c in ipairs(instance) do - for k,v in pairs(c) do + for i=1,#instance do + local c = instance[i] + for k,v in next, c do if type(v) == 'string' then - local t = file.split_path(v) + local t = split_kpse_path(v) if #t > 1 then c[k] = t end @@ -4962,21 +5816,25 @@ function resolvers.splitconfig() end function resolvers.joinconfig() - for i,c in ipairs(instance.order) do - for k,v in pairs(c) do -- ipairs? + local order = instance.order + for i=1,#order do + local c = order[i] + for k,v in next, c do -- indexed? if type(v) == 'table' then c[k] = file.join_path(v) end end end end + function resolvers.split_path(str) if type(str) == 'table' then return str else - return file.split_path(str) + return split_kpse_path(str) end end + function resolvers.join_path(str) if type(str) == 'table' then return file.join_path(str) @@ -4988,8 +5846,9 @@ end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h = { }, { } - for _,vv in ipairs(file.split_path(v)) do + local t, h, p = { }, { }, split_kpse_path(v) + for kk=1,#p do + local vv = p[kk] if vv ~= "" and not h[vv] then t[#t+1] = vv h[vv] = true @@ -5036,11 +5895,15 @@ function resolvers.serialize(files) end t[#t+1] = "return {" if instance.sortdata then - for _, k in pairs(sortedkeys(files)) do -- ipairs + local sortedfiles = sortedkeys(files) + for i=1,#sortedfiles do + local k = sortedfiles[i] local fk = files[k] if type(fk) == 'table' then t[#t+1] = "\t['" .. k .. "']={" - for _, kk in pairs(sortedkeys(fk)) do -- ipairs + local sortedfk = sortedkeys(fk) + for j=1,#sortedfk do + local kk = sortedfk[j] t[#t+1] = dump(kk,fk[kk],"\t\t") end t[#t+1] = "\t}," @@ -5065,12 +5928,18 @@ function resolvers.serialize(files) return concat(t,"\n") end +local data_state = { } + +function resolvers.data_state() + return data_state or { } +end + function resolvers.save_data(dataname, makename) -- untested without cache overload for cachename, files in next, instance[dataname] do local name = (makename or file.join)(cachename,dataname) local luaname, lucname = name .. ".lua", name .. 
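-- sketch of the cached splitter above: on windows only ";" separates entries
-- (so drive letters survive), elsewhere both ";" and ":" do, and unset kpse
-- placeholders are dropped; the input below is made up

--~ local t = resolvers.split_kpse_path(".;/opt/texmf//;{unset}")
--~ -- t = { ".", "/opt/texmf//" }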
".luc" - if trace_verbose then - logs.report("fileio","preparing %s for %s",dataname,cachename) + if trace_locating then + logs.report("fileio","preparing '%s' for '%s'",dataname,cachename) end for k, v in next, files do if type(v) == "table" and #v == 1 then @@ -5084,24 +5953,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = files, + uuid = os.uuid(), } local ok = io.savedata(luaname,resolvers.serialize(data)) if ok then - if trace_verbose then - logs.report("fileio","%s saved in %s",dataname,luaname) + if trace_locating then + logs.report("fileio","'%s' saved in '%s'",dataname,luaname) end if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip - if trace_verbose then - logs.report("fileio","%s compiled to %s",dataname,lucname) + if trace_locating then + logs.report("fileio","'%s' compiled to '%s'",dataname,lucname) end else - if trace_verbose then - logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname) + if trace_locating then + logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end - elseif trace_verbose then - logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname) + elseif trace_locating then + logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname) end end end @@ -5113,19 +5983,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi if blob then local data = blob() if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then - if trace_verbose then - logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename) + data_state[#data_state+1] = data.uuid + if trace_locating then + logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = data.content else - if trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + if trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + elseif trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end end @@ -5144,15 +6015,17 @@ function resolvers.resetconfig() end function resolvers.loadnewconfig() - for _, cnf in ipairs(instance.luafiles) do + local luafiles = instance.luafiles + for i=1,#luafiles do + local cnf = luafiles[i] local pathname = file.dirname(cnf) local filename = file.join(pathname,resolvers.luaname) local blob = loadfile(filename) if blob then local data = blob() if data then - if trace_verbose then - logs.report("fileio","loading configuration file %s",filename) + if trace_locating then + logs.report("fileio","loading configuration file '%s'",filename) end if true then -- flatten to variable.progname @@ -5173,14 +6046,14 @@ function resolvers.loadnewconfig() instance['setup'][pathname] = data end else - if trace_verbose then - logs.report("fileio","skipping configuration file %s",filename) + if trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance['setup'][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping 
configuration file %s",filename) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance.order[#instance.order+1] = instance.setup[pathname] if instance.loaderror then break end @@ -5189,7 +6062,9 @@ end function resolvers.loadoldconfig() if not instance.renewcache then - for _, cnf in ipairs(instance.cnffiles) do + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + local cnf = cnffiles[i] local dname = file.dirname(cnf) resolvers.load_data(dname,'configuration') instance.order[#instance.order+1] = instance.configuration[dname] @@ -5379,7 +6254,7 @@ end function resolvers.expanded_path_list(str) if not str then - return ep or { } + return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") @@ -5397,9 +6272,9 @@ end function resolvers.expanded_path_list_from_var(str) -- brrr local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(str) - else return resolvers.expanded_path_list(tmp) + else + return resolvers.expanded_path_list(str) end end @@ -5446,9 +6321,9 @@ function resolvers.isreadable.file(name) local readable = lfs.isfile(name) -- brrr if trace_detail then if readable then - logs.report("fileio","+ readable: %s",name) + logs.report("fileio","file '%s' is readable",name) else - logs.report("fileio","- readable: %s", name) + logs.report("fileio","file '%s' is not readable", name) end end return readable @@ -5464,7 +6339,7 @@ local function collect_files(names) for k=1,#names do local fname = names[k] if trace_detail then - logs.report("fileio","? blobpath asked: %s",fname) + logs.report("fileio","checking name '%s'",fname) end local bname = file.basename(fname) local dname = file.dirname(fname) @@ -5480,7 +6355,7 @@ local function collect_files(names) local files = blobpath and instance.files[blobpath] if files then if trace_detail then - logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname) + logs.report("fileio","deep checking '%s' (%s)",blobpath,bname) end local blobfile = files[bname] if not blobfile then @@ -5514,7 +6389,7 @@ local function collect_files(names) end end elseif trace_locating then - logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname) + logs.report("fileio","no match in '%s' (%s)",blobpath,bname) end end end @@ -5564,14 +6439,13 @@ end local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc) local result = collected or { } local stamp = nil - filename = file.collapse_path(filename) -- elsewhere - filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere + filename = file.collapse_path(filename) -- speed up / beware: format problem if instance.remember then stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format if instance.found[stamp] then if trace_locating then - logs.report("fileio",'! 
remembered: %s',filename) + logs.report("fileio","remembering file '%s'",filename) end return instance.found[stamp] end @@ -5579,7 +6453,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not dangerous[instance.format or "?"] then if resolvers.isreadable.file(filename) then if trace_detail then - logs.report("fileio",'= found directly: %s',filename) + logs.report("fileio","file '%s' found directly",filename) end instance.found[stamp] = { filename } return { filename } @@ -5587,13 +6461,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end if find(filename,'%*') then if trace_locating then - logs.report("fileio",'! wildcard: %s', filename) + logs.report("fileio","checking wildcard '%s'", filename) end result = resolvers.find_wildcard_files(filename) elseif file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then - logs.report("fileio",'! qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end result = { filename } else @@ -5603,7 +6477,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. ".tex" if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing standard filetype: tex') + logs.report("fileio","no suffix, forcing standard filetype 'tex'") end result, ok = { forcedname }, true end @@ -5613,7 +6487,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. "." .. s if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing format filetype: %s', s) + logs.report("fileio","no suffix, forcing format filetype '%s'", s) end result, ok = { forcedname }, true break @@ -5625,7 +6499,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- try to find in tree (no suffix manipulation), here we search for the -- matching last part of the name local basename = file.basename(filename) - local pattern = (filename .. "$"):gsub("([%.%-])","%%%1") + local pattern = gsub(filename .. 
"$","([%.%-])","%%%1") local savedformat = instance.format local format = savedformat or "" if format == "" then @@ -5635,19 +6509,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan instance.format = "othertextfiles" -- kind of everything, maybe texinput is better end -- - local resolved = collect_instance_files(basename) - if #result == 0 then - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered) + if basename ~= filename then + local resolved = collect_instance_files(basename) + if #result == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered) + end end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if rr:find(pattern) then - result[#result+1], ok = rr, true + resolvers.format = savedformat + -- + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1], ok = rr, true + end end end -- a real wildcard: @@ -5656,14 +6532,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- local filelist = collect_files({basename}) -- for f=1,#filelist do -- local ff = filelist[f][3] or "" - -- if ff:find(pattern) then + -- if find(ff,pattern) then -- result[#result+1], ok = ff, true -- end -- end -- end end if not ok and trace_locating then - logs.report("fileio",'? qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end end else @@ -5682,12 +6558,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan wantedfiles[#wantedfiles+1] = forcedname filetype = resolvers.format_of_suffix(forcedname) if trace_locating then - logs.report("fileio",'! forcing filetype: %s',filetype) + logs.report("fileio","forcing filetype '%s'",filetype) end else filetype = resolvers.format_of_suffix(filename) if trace_locating then - logs.report("fileio",'! using suffix based filetype: %s',filetype) + logs.report("fileio","using suffix based filetype '%s'",filetype) end end else @@ -5699,7 +6575,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end filetype = instance.format if trace_locating then - logs.report("fileio",'! using given filetype: %s',filetype) + logs.report("fileio","using given filetype '%s'",filetype) end end local typespec = resolvers.variable_of_format(filetype) @@ -5707,9 +6583,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then - logs.report("fileio",'? filename: %s',filename) - logs.report("fileio",'? filetype: %s',filetype or '?') - logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | ")) + logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end for k=1,#wantedfiles do local fname = wantedfiles[k] @@ -5730,36 +6604,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan else -- list search local filelist = collect_files(wantedfiles) - local doscan, recurse + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = file.dirname(filelist[i][2]) .. "/" + end + end if trace_detail then - logs.report("fileio",'? filename: %s',filename) + logs.report("fileio","checking filename '%s'",filename) end -- a bit messy ... 
esp the doscan setting here + local doscan for k=1,#pathlist do local path = pathlist[k] if find(path,"^!!") then doscan = false else doscan = true end - if find(path,"//$") then recurse = true else recurse = false end local pathname = gsub(path,"^!+", '') done = false -- using file list - if filelist and not (done and not instance.allresults) and recurse then - -- compare list entries with permitted pattern - pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences - pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname - pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless - local expr = "^" .. pathname + if filelist then + local expression + -- compare list entries with permitted pattern -- /xx /xx// + if not find(pathname,"/$") then + expression = pathname .. "/" + else + expression = pathname + end + expression = gsub(expression,"([%-%.])","%%%1") -- this also influences + expression = gsub(expression,"//+$", '/.*') -- later usage of pathname + expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless + expression = "^" .. expression .. "$" + if trace_detail then + logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname) + end for k=1,#filelist do local fl = filelist[k] local f = fl[2] - if find(f,expr) then - if trace_detail then - logs.report("fileio",'= found in hash: %s',f) - end + local d = dirlist[k] + if find(d,expression) then --- todo, test for readable result[#result+1] = fl[3] resolvers.register_in_trees(f) -- for tracing used files done = true - if not instance.allresults then break end + if instance.allresults then + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d) + end + else + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d) + end + break + end + elseif trace_detail then + logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d) end end end @@ -5775,7 +6672,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local fname = file.join(ppname,w) if resolvers.isreadable.file(fname) then if trace_detail then - logs.report("fileio",'= found by scanning: %s',fname) + logs.report("fileio","found '%s' by scanning",fname) end result[#result+1] = fname done = true @@ -5838,7 +6735,7 @@ function resolvers.find_given_files(filename) local hashes = instance.hashes for k=1,#hashes do local hash = hashes[k] - local files = instance.files[hash.tag] + local files = instance.files[hash.tag] or { } local blist = files[bname] if not blist then local rname = "remap:"..bname @@ -5948,9 +6845,9 @@ function resolvers.load(option) statistics.starttiming(instance) resolvers.resetconfig() resolvers.identify_cnf() - resolvers.load_lua() + resolvers.load_lua() -- will become the new method resolvers.expand_variables() - resolvers.load_cnf() + resolvers.load_cnf() -- will be skipped when we have a lua file resolvers.expand_variables() if option ~= "nofiles" then resolvers.load_hash() @@ -5962,22 +6859,23 @@ end function resolvers.for_files(command, files, filetype, mustexist) if files and #files > 0 then local function report(str) - if trace_verbose then + if trace_locating then logs.report("fileio",str) -- has already verbose else print(str) end end - if trace_verbose then - report('') + if trace_locating then + report('') -- ? 
end - for _, file in ipairs(files) do + for f=1,#files do + local file = files[f] local result = command(file,filetype,mustexist) if type(result) == 'string' then report(result) else - for _,v in ipairs(result) do - report(v) + for i=1,#result do + report(result[i]) -- could be unpack end end end @@ -6024,18 +6922,19 @@ end function table.sequenced(t,sep) -- temp here local s = { } - for k, v in pairs(t) do -- pairs? - s[#s+1] = k .. "=" .. v + for k, v in next, t do -- indexed? + s[#s+1] = k .. "=" .. tostring(v) end return concat(s, sep or " | ") end function resolvers.methodhandler(what, filename, filetype) -- ... + filename = file.collapse_path(filename) local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb local scheme = specification.scheme if resolvers[what][scheme] then if trace_locating then - logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification)) + logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolvers[what][scheme](filename,filetype) -- todo: specification else @@ -6055,8 +6954,9 @@ function resolvers.clean_path(str) end function resolvers.do_with_path(name,func) - for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs? - func("^"..resolvers.clean_path(v)) + local pathlist = resolvers.expanded_path_list(name) + for i=1,#pathlist do + func("^"..resolvers.clean_path(pathlist[i])) end end @@ -6065,7 +6965,9 @@ function resolvers.do_with_var(name,func) end function resolvers.with_files(pattern,handle) - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for i=1,#hashes do + local hash = hashes[i] local blobpath = hash.tag local blobtype = hash.type if blobpath then @@ -6080,7 +6982,7 @@ function resolvers.with_files(pattern,handle) if type(v) == "string" then handle(blobtype,blobpath,v,k) else - for _,vv in pairs(v) do -- ipairs? + for _,vv in next, v do -- indexed handle(blobtype,blobpath,vv,k) end end @@ -6092,7 +6994,7 @@ function resolvers.with_files(pattern,handle) end function resolvers.locate_format(name) - local barename, fmtname = name:gsub("%.%a+$",""), "" + local barename, fmtname = gsub(name,"%.%a+$",""), "" if resolvers.usecache then local path = file.join(caches.setpath("formats")) -- maybe platform fmtname = file.join(path,barename..".fmt") or "" @@ -6140,7 +7042,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6164,7 +7066,7 @@ luatools with a recache feature.

local format, lower, gsub = string.format, string.lower, string.gsub -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet caches = caches or { } @@ -6251,7 +7153,8 @@ function caches.setpath(...) caches.path = '.' end caches.path = resolvers.clean_path(caches.path) - if not table.is_empty({...}) then + local dirs = { ... } + if #dirs > 0 then local pth = dir.mkdirs(caches.path,...) return pth end @@ -6297,6 +7200,7 @@ function caches.savedata(filepath,filename,data,raw) if raw then reduce, simplify = false, false end + data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex else @@ -6322,7 +7226,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6343,7 +7247,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-out'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6359,7 +7263,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6370,8 +7274,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) --[[ldx--

Once we found ourselves defining similar cache constructs @@ -6435,7 +7337,7 @@ end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] - return storage and not table.is_empty(storage) and storage.cache_version == container.version + return storage and storage.cache_version == container.version else return false end @@ -6487,16 +7389,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, lower, gsub = string.format, string.lower, string.gsub +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -- since we want to use the cache instead of the tree, we will now -- reimplement the saver. @@ -6540,19 +7441,20 @@ resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) - if table.is_empty(mountpaths) and usecache then + if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = { caches.setpath("mount") } end - if not table.is_empty(mountpaths) then + if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) - for k, root in pairs(mountpaths) do + for k=1,#mountpaths do + local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then - if line:find("^[%%#%-]") then -- or %W + if find(line,"^[%%#%-]") then -- or %W -- skip - elseif line:find("^zip://") then + elseif find(line,"^zip://") then if trace_locating then logs.report("fileio","mounting %s",line) end @@ -6597,11 +7499,13 @@ function statistics.check_fmt_status(texname) local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") - if luv.enginebanner and luv.enginebanner ~= enginebanner then - return "engine mismatch" + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner) end - if luv.sourcehash and luv.sourcehash ~= sourcehash then - return "source mismatch" + local luvhash = luv.sourcehash or "?" 
+ if luvhash ~= sourcehash then + return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash) end else return "invalid status file" @@ -6727,7 +7631,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6735,47 +7639,47 @@ if not modules then modules = { } end modules ['data-aux'] = { local find = string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") local oldscript = resolvers.clean_path(oldname) - if trace_verbose then + if trace_locating then logs.report("fileio","to be replaced old script %s", oldscript) end local newscripts = resolvers.find_files(newname) or { } if #newscripts == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","unable to locate new script") end else for i=1,#newscripts do local newscript = resolvers.clean_path(newscripts[i]) - if trace_verbose then + if trace_locating then logs.report("fileio","checking new script %s", newscript) end if oldscript == newscript then - if trace_verbose then + if trace_locating then logs.report("fileio","old and new script are the same") end elseif not find(newscript,scriptpath) then - if trace_verbose then + if trace_locating then logs.report("fileio","new script should come from %s",scriptpath) end elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_verbose then + if trace_locating then logs.report("fileio","invalid new script name") end else local newdata = io.loaddata(newscript) if newdata then - if trace_verbose then + if trace_locating then logs.report("fileio","old script content replaced by new content") end io.savedata(oldscript,newdata) break - elseif trace_verbose then + elseif trace_locating then logs.report("fileio","unable to load new script") end end @@ -6790,7 +7694,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-lst'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6814,7 +7718,9 @@ local function list(list,report) local instance = resolvers.instance local pat = upper(pattern or "","") local report = report or texio.write_nl - for _,key in pairs(table.sortedkeys(list)) do + local sorted = table.sortedkeys(list) + for i=1,#sorted do + local key = sorted[i] if instance.pattern == "" or find(upper(key),pat) then if instance.kpseonly then if instance.kpsevars[key] then @@ -6833,11 +7739,14 @@ function resolvers.listers.expansions() list(resolvers.instance.expansions) end function resolvers.listers.configurations(report) local report = report or texio.write_nl local instance = resolvers.instance - for _,key in ipairs(table.sortedkeys(instance.kpsevars)) do + local sorted = table.sortedkeys(instance.kpsevars) + for 
i=1,#sorted do + local key = sorted[i] if not instance.pattern or (instance.pattern=="") or find(key,instance.pattern) then report(format("%s\n",key)) - for i,c in ipairs(instance.order) do - local str = c[key] + local order = instance.order + for i=1,#order do + local str = order[i][key] if str then report(format("\t%s\t%s",i,str)) end @@ -6943,7 +7852,7 @@ if not resolvers then os.exit() end -logs.setprogram('LuaTools',"TDS Management Tool 1.31",environment.arguments["verbose"] or false) +logs.setprogram('LuaTools',"TDS Management Tool 1.32",environment.arguments["verbose"] or false) local instance = resolvers.reset() @@ -7000,6 +7909,12 @@ end if environment.arguments["trace"] then resolvers.settrace(environment.arguments["trace"]) end +local trackspec = environment.argument("trackers") or environment.argument("track") + +if trackspec then + trackers.enable(trackspec) +end + runners = runners or { } messages = messages or { } @@ -7033,6 +7948,7 @@ messages.help = [[ --engine=str target engine --progname=str format or backend --pattern=str filter variables +--trackers=list enable given trackers ]] function runners.make_format(texname) @@ -7091,8 +8007,9 @@ function runners.make_format(texname) logs.simple("using uncompiled initialization file: %s",luaname) end else - for _, v in pairs({instance.luaname, instance.progname, barename}) do - v = string.gsub(v..".lua","%.lua%.lua$",".lua") + local what = { instance.luaname, instance.progname, barename } + for k=1,#what do + local v = string.gsub(what[k]..".lua","%.lua%.lua$",".lua") if v and (v ~= "") then luaname = resolvers.find_files(v)[1] or "" if luaname ~= "" then @@ -7116,7 +8033,8 @@ function runners.make_format(texname) logs.simple("using lua initialization file: %s",luaname) local mp = dir.glob(file.removesuffix(file.basename(luaname)).."-*.mem") if mp and #mp > 0 then - for _, name in ipairs(mp) do + for i=1,#mp do + local name = mp[i] logs.simple("removing related mplib format %s", file.basename(name)) os.remove(name) end diff --git a/Master/texmf-dist/scripts/context/lua/luatools.rme b/Master/texmf-dist/scripts/context/lua/luatools.rme index b320e1184b5..901e9a9a3fc 100644 --- a/Master/texmf-dist/scripts/context/lua/luatools.rme +++ b/Master/texmf-dist/scripts/context/lua/luatools.rme @@ -1,3 +1,3 @@ On MSWindows the luatools.lua script is called -with luatools.cmd. On Unix you can either rename +with luatools.exe. On Unix you can either rename luatools.lua to luatools, or use a symlink. diff --git a/Master/texmf-dist/scripts/context/lua/mtx-babel.lua b/Master/texmf-dist/scripts/context/lua/mtx-babel.lua index e241e933454..01e2ba4b223 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-babel.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-babel.lua @@ -415,7 +415,7 @@ do end -logs.extendbanner("Babel Conversion Tools 1.2",true) +logs.extendbanner("Babel Input To UTF Conversion 1.20",true) messages.help = [[ --language=string conversion language (e.g. greek) diff --git a/Master/texmf-dist/scripts/context/lua/mtx-cache.lua b/Master/texmf-dist/scripts/context/lua/mtx-cache.lua index a1fbed8255d..c2a0db00d89 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-cache.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-cache.lua @@ -22,9 +22,11 @@ function scripts.cache.collect_two(...) 
return path, rest end +local suffixes = { "afm", "tfm", "def", "enc", "otf", "mp", "data" } + function scripts.cache.process_one(action) - for k, v in ipairs({ "afm", "tfm", "def", "enc", "otf", "mp", "data" }) do - action("fonts", v) + for i=1,#suffixes do + action("fonts", suffixes[i]) end end @@ -35,13 +37,10 @@ end -- todo: recursive delete of paths function scripts.cache.remove(list,keep) - local keepsuffixes = { } - for _, v in ipairs(keep or {}) do - keepsuffixes[v] = true - end - local n = 0 - for _,filename in ipairs(list) do - if filename:find("luatex%-cache") then -- safeguard + local n, keepsuffixes = 0, table.tohash(keep or { }) + for i=1,#list do + local filename = list[i] + if string.find(filename,"luatex%-cache") then -- safeguard if not keepsuffixes[file.extname(filename) or ""] then os.remove(filename) n = n + 1 @@ -76,7 +75,7 @@ function scripts.cache.list(all) end) end -logs.extendbanner("Cache Tools 0.10") +logs.extendbanner("ConTeXt & MetaTeX Cache Management 0.10") messages.help = [[ --purge remove not used files diff --git a/Master/texmf-dist/scripts/context/lua/mtx-chars.lua b/Master/texmf-dist/scripts/context/lua/mtx-chars.lua index d4330ca304d..6acacfbd22e 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-chars.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-chars.lua @@ -11,17 +11,17 @@ local format, concat, utfchar, upper = string.format, table.concat, unicode.utf8 scripts = scripts or { } scripts.chars = scripts.chars or { } -local banner = [[ --- filename : char-mth.lua --- comment : companion to char-mth.tex (in ConTeXt) --- author : Hans Hagen, PRAGMA-ADE, Hasselt NL --- license : see context related readme files --- comment : generated from data file downloaded from STIX website - -if not versions then versions = { } end versions['char-mth'] = 1.001 -if not characters then characters = { } end -]] - +--~ local banner = [[ +--~ -- filename : char-mth.lua +--~ -- comment : companion to char-mth.tex (in ConTeXt) +--~ -- author : Hans Hagen, PRAGMA-ADE, Hasselt NL +--~ -- license : see context related readme files +--~ -- comment : generated from data file downloaded from STIX website +--~ +--~ if not versions then versions = { } end versions['char-mth'] = 1.001 +--~ if not characters then characters = { } end +--~ ]] +--~ --~ function scripts.chars.stixtomkiv(inname,outname) --~ if inname == "" then --~ logs.report("aquiring math data","invalid datafilename") @@ -248,7 +248,7 @@ function scripts.chars.makeencoutf() end local f = open("xetx-cls.tex",banner_utf_classes) if f then - for k, v in pairs(xtxclasses) do + for k, v in next, xtxclasses do f:write(format("\\defineXTXcharinjectionclass[lb:%s]\n",k)) end f:write("\n") @@ -301,7 +301,7 @@ function scripts.chars.makeencoutf() end end -logs.extendbanner("Character Tools 0.10") +logs.extendbanner("MkII Character Table Generators 0.10") messages.help = [[ --stix convert stix table to math table diff --git a/Master/texmf-dist/scripts/context/lua/mtx-check.lua b/Master/texmf-dist/scripts/context/lua/mtx-check.lua index 6be7f276597..4266ddf0d46 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-check.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-check.lua @@ -40,6 +40,8 @@ do local l_s, r_s = P("["), P("]") local l_g, r_g = P("{"), P("}") + local okay = lpeg.P("{[}") + lpeg.P("{]}") + local esc = P("\\") local cr = P("\r") local lf = P("\n") @@ -72,7 +74,7 @@ do ["tokens"] = (V("ignore") + V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + V("errors") + 1)^0, ["whatever"] 
= line + esc * 1 + C(P("%") * (1-line)^0), ["grouped"] = l_g * (V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + (1 - l_g - r_g))^0 * r_g, - ["setup"] = l_s * (V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + (1 - l_s - r_s))^0 * r_s, + ["setup"] = l_s * (okay + V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + (1 - l_s - r_s))^0 * r_s, ["display"] = d_m * (V("whatever") + V("grouped") + (1 - d_m))^0 * d_m, ["inline"] = i_m * (V("whatever") + V("grouped") + (1 - i_m))^0 * i_m, ["errors"] = (V("gerror")+ V("serror") + V("derror") + V("ierror")), @@ -103,8 +105,10 @@ function scripts.checker.check(filename) local str = io.loaddata(filename) if str then validator.check(str) - if #validator.errors > 0 then - for k, v in ipairs(validator.errors) do + local errors = validator.errors + if #errors > 0 then + for k=1,#errors do + local v = errors[k] local kind, position, line = v[1], v[2], v[3] local data = str:sub(position-30,position+30) data = data:gsub("(.)", { @@ -123,7 +127,7 @@ function scripts.checker.check(filename) end end -logs.extendbanner("Syntax Checking 0.10",true) +logs.extendbanner("Basic ConTeXt Syntax Checking 0.10",true) messages.help = [[ --convert check tex file for errors diff --git a/Master/texmf-dist/scripts/context/lua/mtx-context.lua b/Master/texmf-dist/scripts/context/lua/mtx-context.lua index 71d2eee5611..79e74e407ff 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-context.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-context.lua @@ -66,7 +66,7 @@ do function ctxrunner.reflag(flags) local t = { } - for _, flag in pairs(flags) do + for _, flag in next, flags do local key, value = flag:match("^(.-)=(.+)$") if key and value then t[key] = value @@ -122,22 +122,24 @@ do return end end - if table.is_empty(ctxdata.prepfiles) then - logs.simple("nothing prepared, no ctl file saved") - os.remove(ctlname) - else + local prepfiles = ctxdata.prepfiles + if prepfiles and next(prepfiles) then logs.simple("saving logdata in: %s",ctlname) f = io.open(ctlname,'w') if f then f:write("\n\n") f:write(string.format("\n",yn(ctxdata.runlocal))) ---~ for name, value in pairs(ctxdata.prepfiles) do - for _, name in ipairs(table.sortedkeys(ctxdata.prepfiles)) do - f:write(string.format("\t%s\n",yn(ctxdata.prepfiles[name]),name)) + local sorted = table.sortedkeys(prepfiles) + for i=1,#sorted do + local name = sorted[i] + f:write(string.format("\t%s\n",yn(prepfiles[name]),name)) end f:write("\n") f:close() end + else + logs.simple("nothing prepared, no ctl file saved") + os.remove(ctlname) end end @@ -153,7 +155,7 @@ do print(table.serialize(ctxdata.modules)) print(table.serialize(ctxdata.filters)) print(table.serialize(ctxdata.modes)) - print(xml.serialize(ctxdata.xmldata)) + print(xml.tostring(ctxdata.xmldata)) end function ctxrunner.manipulate(ctxdata,ctxname,defaultname) @@ -180,7 +182,7 @@ do local found = lfs.isfile(usedname) if not found then - for _, path in pairs(ctxdata.locations) do + for _, path in next, ctxdata.locations do local fullname = file.join(path,ctxdata.ctxname) if lfs.isfile(fullname) then usedname, found = fullname, true @@ -189,6 +191,9 @@ do end end + usedname = resolvers.find_file(ctxdata.ctxname,"tex") + found = usedname ~= "" + if not found and defaultname and defaultname ~= "" and lfs.isfile(defaultname) then usedname, found = defaultname, true end @@ -218,40 +223,40 @@ do ctxdata.flags = ctxrunner.reflag(ctxdata.flags) - for _, message in ipairs(ctxdata.messages) do - logs.simple("ctx 
comment: %s", xml.tostring(message)) + local messages = ctxdata.messages + for i=1,#messages do + logs.simple("ctx comment: %s", xml.tostring(messages[i])) end - xml.each(ctxdata.xmldata,"ctx:value[@name='job']", function(ek,e,k) - e[k] = ctxdata.variables['job'] or "" - end) + for r, d, k in xml.elements(ctxdata.xmldata,"ctx:value[@name='job']") do + d[k] = ctxdata.variables['job'] or "" + end local commands = { } - xml.each(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor", function(r,d,k) - local ek = d[k] - commands[ek.at and ek.at['name'] or "unknown"] = ek - end) + for e in xml.collected(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/ctx:processor") do + commands[e.at and e.at['name'] or "unknown"] = e + end - local suffix = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/attribute(suffix)") or ctxdata.suffix - local runlocal = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/attribute(local)") + local suffix = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/attribute('suffix')") or ctxdata.suffix + local runlocal = xml.filter(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:processors/attribute('local')") runlocal = toboolean(runlocal) - for _, files in ipairs(xml.filters.elements(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:files")) do - for _, pattern in ipairs(xml.filters.elements(files,"ctx:file")) do + for files in xml.collected(ctxdata.xmldata,"/ctx:job/ctx:preprocess/ctx:files") do + for pattern in xml.collected(files,"ctx:file") do preprocessor = pattern.at['processor'] or "" if preprocessor ~= "" then ctxdata.variables['old'] = ctxdata.jobname - xml.each(ctxdata.xmldata,"ctx:value", function(r,d,k) + for r, d, k in xml.elements(ctxdata.xmldata,"ctx:value") do local ek = d[k] local ekat = ek.at['name'] if ekat == 'old' then d[k] = ctxrunner.substitute(ctxdata.variables[ekat] or "") end - end) + end pattern = ctxrunner.justtext(xml.tostring(pattern)) @@ -260,7 +265,9 @@ do local pluspath = false if #oldfiles == 0 then -- message: no files match pattern - for _, p in ipairs(ctxdata.paths) do + local paths = ctxdata.paths + for i=1,#paths do + local p = paths[i] local oldfiles = dir.glob(path.join(p,pattern)) if #oldfiles > 0 then pluspath = true @@ -271,15 +278,18 @@ do if #oldfiles == 0 then -- message: no old files else - for _, oldfile in ipairs(oldfiles) do - newfile = oldfile .. "." .. suffix -- addsuffix will add one only + for i=1,#oldfiles do + local oldfile = oldfiles[i] + local newfile = oldfile .. "." .. 
suffix -- addsuffix will add one only if ctxdata.runlocal then newfile = file.basename(newfile) end if oldfile ~= newfile and file.needsupdate(oldfile,newfile) then -- message: oldfile needs preprocessing -- os.remove(newfile) - for _, pp in ipairs(preprocessor:split(',')) do + local splitted = preprocessor:split(',') + for i=1,#splitted do + local pp = splitted[i] local command = commands[pp] if command then command = xml.copy(command) @@ -290,27 +300,27 @@ do if ctxdata.runlocal then newfile = file.basename(newfile) end - xml.each(command,"ctx:old", function(r,d,k) + for r, d, k in xml.elements(command,"ctx:old") do d[k] = ctxrunner.substitute(oldfile) - end) - xml.each(command,"ctx:new", function(r,d,k) + end + for r, d, k in xml.elements(command,"ctx:new") do d[k] = ctxrunner.substitute(newfile) - end) - -- message: preprocessing #{oldfile} into #{newfile} using #{pp} + end ctxdata.variables['old'] = oldfile ctxdata.variables['new'] = newfile - xml.each(command,"ctx:value", function(r,d,k) + for r, d, k in xml.elements(command,"ctx:value") do local ek = d[k] local ekat = ek.at and ek.at['name'] if ekat then d[k] = ctxrunner.substitute(ctxdata.variables[ekat] or "") end - end) + end -- potential optimization: when mtxrun run internal - command = xml.text(command) - command = ctxrunner.justtext(command) -- command is still xml element here + command = xml.content(command) + command = ctxrunner.justtext(command) logs.simple("command: %s",command) local result = os.spawn(command) or 0 + -- somehow we get the wrong return value if result > 0 then logs.simple("error, return code: %s",result) end @@ -340,6 +350,14 @@ do end + function ctxrunner.preppedfile(ctxdata,filename) + if ctxdata.prepfiles[file.basename(filename)] then + return filename .. ".prep" + else + return filename + end + end + end -- rest @@ -352,7 +370,9 @@ scripts.context.multipass = { function scripts.context.multipass.hashfiles(jobname) local hash = { } - for _, suffix in ipairs(scripts.context.multipass.suffixes) do + local suffixes = scripts.context.multipass.suffixes + for i=1,#suffixes do + local suffix = suffixes[i] local full = jobname .. 
suffix hash[full] = md5.hex(io.loaddata(full) or "unknown") end @@ -360,7 +380,7 @@ function scripts.context.multipass.hashfiles(jobname) end function scripts.context.multipass.changed(oldhash, newhash) - for k,v in pairs(oldhash) do + for k,v in next, oldhash do if v ~= newhash[k] then return true end @@ -398,7 +418,7 @@ function scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,curr end local function setvalues(flag,format,plural) if type(flag) == "table" then - for k, v in pairs(flag) do + for k, v in next, flag do f:write(format:format(v),"\n") end else @@ -434,9 +454,17 @@ function scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,curr if type(environment.argument("track")) == "string" then setvalue ("track" , "\\enabletrackers[%s]") end + if type(environment.argument("trackers")) == "string" then + setvalue ("trackers" , "\\enabletrackers[%s]") + end + if type(environment.argument("directives")) == "string" then + setvalue ("directives", "\\enabledirectives[%s]") + end setfixed ("timing" , "\\usemodule[timing]") setfixed ("batchmode" , "\\batchmode") + setfixed ("batch" , "\\batchmode") setfixed ("nonstopmode" , "\\nonstopmode") + setfixed ("nonstop" , "\\nonstopmode") setfixed ("tracefiles" , "\\tracefilestrue") setfixed ("nostats" , "\\nomkivstatistics") setfixed ("paranoid" , "\\def\\maxreadlevel{1}") @@ -451,10 +479,11 @@ function scripts.context.multipass.makeoptionfile(jobname,ctxdata,kindofrun,curr -- setalways("%% process info") -- - setalways( "\\setupsystem[\\c!n=%s,\\c!m=%s]", kindofrun or 0, currentrun or 0) - setalways( "\\setupsystem[\\c!type=%s]",os.platform) - setvalue ("inputfile" , "\\setupsystem[inputfile=%s]") + -- setvalue ("inputfile" , "\\setupsystem[inputfile=%s]") + setalways( "\\setupsystem[inputfile=%s]",environment.argument("input") or environment.files[1] or "\\jobname") setvalue ("result" , "\\setupsystem[file=%s]") + setalways( "\\setupsystem[\\c!n=%s,\\c!m=%s]", kindofrun or 0, currentrun or 0) + -- setalways( "\\setupsystem[\\c!type=%s]",os.type) -- windows or unix setvalues("path" , "\\usepath[%s]") setvalue ("setuppath" , "\\setupsystem[\\c!directory={%s}]") setvalue ("randomseed" , "\\setupsystem[\\c!random=%s]") @@ -557,18 +586,15 @@ scripts.context.interfaces = { it = "cont-it", ro = "cont-ro", pe = "cont-pe", - -- for taco and me - -- xp = "cont-xp", } scripts.context.defaultformats = { "cont-en", "cont-nl", --- "cont-xp", "mptopdf", -- "metatex", "metafun", - "plain" +-- "plain" } local function analyze(filename) @@ -585,6 +611,9 @@ local function analyze(filename) elseif line:find("^&1', file.replacesuffix(name,"pdf"))) -end -function scripts.context.closepdf(name) - os.spawn(string.format('pdfclose --file "%s" 2>&1', file.replacesuffix(name,"pdf"))) -end - --~ function scripts.context.openpdf(name) ---~ -- somehow two instances start up, one with a funny filename ---~ os.spawn(string.format("\"c:/program files/kde/bin/okular.exe\" --unique %s",file.replacesuffix(name,"pdf"))) +--~ os.spawn(string.format('pdfopen --file "%s" 2>&1', file.replacesuffix(name,"pdf"))) --~ end --~ function scripts.context.closepdf(name) ---~ -- +--~ os.spawn(string.format('pdfclose --file "%s" 2>&1', file.replacesuffix(name,"pdf"))) --~ end +local pdfview -- delayed loading + +function scripts.context.openpdf(name) + pdfview = pdfview or dofile(resolvers.find_file("l-pdfview.lua","tex")) + logs.simple("pdfview methods: %s, current method: %s, MTX_PDFVIEW_METHOD=%s",pdfview.methods(),pdfview.method,os.getenv(pdfview.METHOD) or "") + 
pdfview.open(file.replacesuffix(name,"pdf")) +end + +function scripts.context.closepdf(name) + pdfview = pdfview or dofile(resolvers.find_file("l-pdfview.lua","tex")) + pdfview.close(file.replacesuffix(name,"pdf")) +end + function scripts.context.run(ctxdata,filename) -- filename overloads environment.files local files = (filename and { filename }) or environment.files if ctxdata then -- todo: interface - for k,v in pairs(ctxdata.flags) do + for k,v in next, ctxdata.flags do environment.setargument(k,v) end end @@ -648,7 +682,8 @@ function scripts.context.run(ctxdata,filename) end -- if formatfile and scriptfile then - for _, filename in ipairs(files) do + for i=1,#files do + local filename = files[i] local basename, pathname = file.basename(filename), file.dirname(filename) local jobname = file.removesuffix(basename) if pathname == "" then @@ -657,11 +692,18 @@ function scripts.context.run(ctxdata,filename) -- look at the first line local a = analyze(filename) if a and (a.engine == 'pdftex' or a.engine == 'xetex' or environment.argument("pdftex") or environment.argument("xetex")) then - local texexec = resolvers.find_file("texexec.rb") or "" - if texexec ~= "" then - os.setenv("RUBYOPT","") - local command = string.format("ruby %s %s",texexec,environment.reconstruct_commandline(environment.arguments_after)) - os.exec(command) + if false then + -- we need to write a top etc too and run mp etc so it's not worth the + -- trouble, so it will take a while before the next is finished + -- + -- require "mtx-texutil.lua" + else + local texexec = resolvers.find_file("texexec.rb") or "" + if texexec ~= "" then + os.setenv("RUBYOPT","") + local command = string.format("ruby %s %s",texexec,environment.reconstruct_commandline(environment.arguments_after)) + os.exec(command) + end end else if a and a.interface and a.interface ~= interface then @@ -677,6 +719,7 @@ function scripts.context.run(ctxdata,filename) end if formatfile and scriptfile then -- we default to mkiv xml ! + -- the --prep argument might become automatic (and noprep) local suffix = file.extname(filename) or "?" 
if scripts.context.xmlsuffixes[suffix] or environment.argument("forcexml") then if environment.argument("mkii") then @@ -688,6 +731,9 @@ function scripts.context.run(ctxdata,filename) filename = makestub("\\ctxlua{context.runfile('%s')}",filename) elseif scripts.context.luasuffixes[suffix] or environment.argument("forcelua") then filename = makestub("\\ctxlua{dofile('%s')}",filename) + elseif environment.argument("prep") then + -- we need to keep the original jobname + filename = makestub("\\readfile{%s}{}{}",filename,ctxrunner.preppedfile(ctxdata,filename)) end -- -- todo: also other stubs @@ -701,7 +747,7 @@ function scripts.context.run(ctxdata,filename) oldbase = file.removesuffix(jobname) newbase = file.removesuffix(resultname) if oldbase ~= newbase then - for _, suffix in pairs(scripts.context.beforesuffixes) do + for _, suffix in next, scripts.context.beforesuffixes do local oldname = file.addsuffix(oldbase,suffix) local newname = file.addsuffix(newbase,suffix) local tmpname = "keep-"..oldname @@ -732,9 +778,13 @@ function scripts.context.run(ctxdata,filename) end -- local flags = { } - if environment.argument("batchmode") then + if environment.argument("batchmode") or environment.argument("batch") then flags[#flags+1] = "--interaction=batchmode" end + if environment.argument("synctex") then + logs.simple("warning: syntex is enabled") -- can add upto 5% runtime + flags[#flags+1] = "--synctex=1" + end flags[#flags+1] = "--fmt=" .. string.quote(formatfile) flags[#flags+1] = "--lua=" .. string.quote(scriptfile) flags[#flags+1] = "--backend=pdf" @@ -754,16 +804,16 @@ function scripts.context.run(ctxdata,filename) --~ scripts.context.make(formatname) --~ returncode, errorstring = os.spawn(command) --~ if returncode == 3 then - --~ logs.simple("fatal error, return code 3, message: %s",errorstring or "?") + --~ logs.simple("ks: return code 3, message: %s",errorstring or "?") --~ os.exit(1) --~ end --~ end if not returncode then - logs.simple("fatal error, no return code, message: %s",errorstring or "?") + logs.simple("fatal error: no return code, message: %s",errorstring or "?") os.exit(1) break elseif returncode > 0 then - logs.simple("fatal error, return code: %s",returncode or "?") + logs.simple("fatal error: return code: %s",returncode or "?") os.exit(returncode) break else @@ -784,24 +834,24 @@ function scripts.context.run(ctxdata,filename) logs.simple("arrange run: %s",command) local returncode, errorstring = os.spawn(command) if not returncode then - logs.simple("fatal error, no return code, message: %s",errorstring or "?") + logs.simple("fatal error: no return code, message: %s",errorstring or "?") os.exit(1) elseif returncode > 0 then - logs.simple("fatal error, return code: %s",returncode or "?") + logs.simple("fatal error: return code: %s",returncode or "?") os.exit(returncode) end end -- if environment.argument("purge") then - scripts.context.purge_job(filename) + scripts.context.purge_job(jobname) elseif environment.argument("purgeall") then - scripts.context.purge_job(filename,true) + scripts.context.purge_job(jobname,true) end -- os.remove(jobname..".top") -- if resultname then - for _, suffix in pairs(scripts.context.aftersuffixes) do + for _, suffix in next, scripts.context.aftersuffixes do local oldname = file.addsuffix(oldbase,suffix) local newname = file.addsuffix(newbase,suffix) local tmpname = "keep-"..oldname @@ -910,9 +960,11 @@ function scripts.context.make(name) (environment.argument("xetex") and "mtxrun texexec.rb --make --xetex " ) or false, } local list = (name 
and { name }) or (environment.files[1] and environment.files) or scripts.context.defaultformats - for _, name in ipairs(list) do + for i=1,#list do + local name = list[i] name = scripts.context.interfaces[name] or name - for _, runner in ipairs(runners) do + for i=1,#runners do + local runner = runners[i] if runner then local command = runner .. name logs.simple("running command: %s",command) @@ -970,24 +1022,29 @@ local loaded = false function scripts.context.metapost() local filename = environment.files[1] or "" ---~ local tempname = "mtx-context-metapost.tex" ---~ local tempdata = string.format(template,"metafun",filename) ---~ io.savedata(tempname,tempdata) ---~ environment.files[1] = tempname ---~ environment.setargument("result",file.removesuffix(filename)) ---~ environment.setargument("once",true) ---~ scripts.context.run() if not loaded then dofile(resolvers.find_file("mlib-run.lua")) loaded = true commands = commands or { } commands.writestatus = logs.report end - local formatname = environment.arguments("format") or "metafun" + local formatname = environment.argument("format") or "metafun" if formatname == "" or type(format) == "boolean" then formatname = "metafun" end - if environment.arguments("svg") then + if environment.argument("pdf") then + local basename = file.removesuffix(filename) + local resultname = environment.argument("result") or basename + local jobname = "mtx-context-metapost" + local tempname = file.addsuffix(jobname,"tex") + io.savedata(tempname,string.format(template,"metafun",filename)) + environment.files[1] = tempname + environment.setargument("result",resultname) + environment.setargument("once",true) + scripts.context.run() + scripts.context.purge_job(jobname,true) + scripts.context.purge_job(resultname,true) + elseif environment.argument("svg") then metapost.directrun(formatname,filename,"svg") else metapost.directrun(formatname,filename,"mps") @@ -1041,7 +1098,7 @@ local function purge_file(dfile,cfile) if os.remove(dfile) then return file.basename(dfile) end - else + elseif dfile then if os.remove(dfile) then return file.basename(dfile) end @@ -1049,22 +1106,24 @@ local function purge_file(dfile,cfile) end function scripts.context.purge_job(jobname,all) - jobname = file.basename(jobname) - local filebase = file.removesuffix(jobname) - local deleted = { } - for _, suffix in ipairs(obsolete_results) do - deleted[#deleted+1] = purge_file(filebase.."."..suffix,filebase..".pdf") - end - for _, suffix in ipairs(temporary_runfiles) do - deleted[#deleted+1] = purge_file(filebase.."."..suffix) - end - if all then - for _, suffix in ipairs(persistent_runfiles) do - deleted[#deleted+1] = purge_file(filebase.."."..suffix) + if jobname and jobname ~= "" then + jobname = file.basename(jobname) + local filebase = file.removesuffix(jobname) + local deleted = { } + for i=1,#obsolete_results do + deleted[#deleted+1] = purge_file(filebase.."."..obsolete_results[i],filebase..".pdf") + end + for i=1,#temporary_runfiles do + deleted[#deleted+1] = purge_file(filebase.."."..temporary_runfiles[i]) + end + if all then + for i=1,#persistent_runfiles do + deleted[#deleted+1] = purge_file(filebase.."."..persistent_runfiles[i]) + end + end + if #deleted > 0 then + logs.simple("purged files: %s", table.join(deleted,", ")) end - end - if #deleted > 0 then - logs.simple("purged files: %s", table.join(deleted,", ")) end end @@ -1077,7 +1136,8 @@ function scripts.context.purge(all) local persistent = table.tohash(persistent_runfiles) local generic = table.tohash(generic_files) local 
deleted = { } - for _, name in ipairs(files) do + for i=1,#files do + local name = files[i] local suffix = file.extname(name) local basename = file.basename(name) if obsolete[suffix] or temporary[suffix] or persistent[suffix] or generic[basename] then @@ -1146,7 +1206,8 @@ function scripts.context.extras(pattern) else logs.extendbanner(extra) end - for k,v in ipairs(list) do + for i=1,#list do + local v = list[i] local data = io.loaddata(v) or "" data = string.match(data,"begin help(.-)end help") if data then @@ -1154,6 +1215,7 @@ function scripts.context.extras(pattern) for s in string.gmatch(data,"%% *(.-)[\n\r]") do h[#h+1] = s end + h[#h+1] = "" logs.help(table.concat(h,"\n"),"nomoreinfo") end end @@ -1191,8 +1253,15 @@ end -- todo: we need to do a dummy run -function scripts.context.track() - environment.files = { "m-track" } +function scripts.context.trackers() + environment.files = { resolvers.find_file("m-trackers.tex") } + scripts.context.multipass.nofruns = 1 + scripts.context.run() + -- maybe filter from log +end + +function scripts.context.directives() + environment.files = { resolvers.find_file("m-directives.tex") } scripts.context.multipass.nofruns = 1 scripts.context.run() -- maybe filter from log @@ -1338,35 +1407,65 @@ function scripts.context.update() end end -logs.extendbanner("ConTeXt Tools 0.51",true) +logs.extendbanner("ConTeXt Process Management 0.51",true) messages.help = [[ --run process (one or more) files (default action) --make create context formats ---generate generate file database etc. ---ctx=name use ctx file ---version report installed context version + +--ctx=name use ctx file (process management specification) +--interface use specified user interface (default: en) + +--autopdf close pdf file in viewer and start pdf viewer afterwards +--purge(all) purge files either or not after a run (--pattern=...) + +--usemodule=list load the given module or style, normally part o fthe distribution +--environment=list load the given environment file first (document styles) +--mode=list enable given the modes (conditional processing in styles) +--path=list also consult the given paths when files are looked for +--arguments=list set variables that can be consulted during a run (key/value pairs) +--randomseed=number set the randomseed +--result=name rename the resulting output to the given name +--trackers=list show/set tracker variables +--directives=list show/set directive variables + --forcexml force xml stub (optional flag: --mkii) --forcecld force cld (context lua document) stub ---autopdf close pdf file in viewer and start pdf viewer afterwards ---once only one run ---purge(all) purge files (--pattern=...) ---result=name rename result to given name ---arrange run extra arrange pass + +--arrange run extra imposition pass, given that the style sets up imposition +--noarrange ignore imposition specifications in the style + +--once only run once (no multipass data file is produced) +--batchmode run without stopping and don't show messages on the console +--nonstopmode run without stopping + +--generate generate file database etc. (as luatools does) +--paranoid don't descend to .. and ../.. 
+--version report installed context version --expert expert options ---interface use specified user interface ]] +-- filter=list is kind of obsolete +-- color is obsolete for mkiv, always on +-- separation is obsolete for mkiv, no longer available +-- output is currently obsolete for mkiv +-- setuppath=list must check +-- modefile=name must check +-- input=name load the given inputfile (must check) + messages.expert = [[ expert options: --touch update context version number (remake needed afterwards, also provide --expert) +--nostats omit runtime statistics at the end of the run --update update context from website (not to be confused with contextgarden) --profile profile job (use: mtxrun --script profile --analyse) ---track show/set tracker variables --timing generate timing and statistics overview +--tracefiles show some extra info when locating files (at the tex end) + --extra=name process extra (mtx-context- in distribution) +--extras show extras ]] messages.private = [[ @@ -1387,6 +1486,8 @@ special options: if environment.argument("once") then scripts.context.multipass.nofruns = 1 +elseif environment.argument("runs") then + scripts.context.multipass.nofruns = tonumber(environment.argument("runs")) or nil end if environment.argument("profile") then @@ -1417,12 +1518,22 @@ elseif environment.argument("update") then scripts.context.update() elseif environment.argument("expert") then logs.help(table.join({ messages.expert, messages.private, messages.special },"\n")) +elseif environment.argument("extras") then + scripts.context.extras() elseif environment.argument("extra") then scripts.context.extra() elseif environment.argument("help") then - logs.help(messages.help) -elseif environment.argument("track") and type(environment.argument("track")) == "boolean" then - scripts.context.track() + if environment.files[1] == "extras" then + scripts.context.extras() + else + logs.help(messages.help) + end +elseif environment.argument("trackers") and type(environment.argument("trackers")) == "boolean" then + scripts.context.trackers() +elseif environment.argument("directives") and type(environment.argument("directives")) == "boolean" then + scripts.context.directives() +elseif environment.argument("track") and type(environment.argument("track")) == "boolean" then -- for old times sake, will go + scripts.context.trackers() elseif environment.files[1] then -- scripts.context.timed(scripts.context.run) scripts.context.timed(scripts.context.autoctx) diff --git a/Master/texmf-dist/scripts/context/lua/mtx-convert.lua b/Master/texmf-dist/scripts/context/lua/mtx-convert.lua index c0c383b176f..62198a62188 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-convert.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-convert.lua @@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['mtx-convert'] = { graphics = graphics or { } graphics.converters = graphics.converters or { } -local gsprogram = (os.platform == "windows" and "gswin32c") or "gs" +local gsprogram = (os.type == "windows" and "gswin32c") or "gs" local gstemplate = "%s -q -sDEVICE=pdfwrite -dEPSCrop -dNOPAUSE -dNOCACHE -dBATCH -dAutoRotatePages=/None -dProcessColorModel=/DeviceCMYK -sOutputFile=%s %s -c quit" function graphics.converters.eps(oldname,newname) @@ -31,6 +31,7 @@ function graphics.converters.jpg(oldname,newname) return imtemplate[quality]:format(improgram,oldname,newname) end +graphics.converters.gif = graphics.converters.jpg graphics.converters.tif = graphics.converters.jpg graphics.converters.tiff = 
graphics.converters.jpg graphics.converters.png = graphics.converters.jpg @@ -111,13 +112,14 @@ function scripts.convert.convertall() end function scripts.convert.convertgiven() - for _, name in ipairs(environment.files) do - graphics.converters.convertfile(name) + local files = environment.files + for i=1,#files do + graphics.converters.convertfile(files[i]) end end -logs.extendbanner("Graphic Conversion Tools 0.10",true) +logs.extendbanner("ConTeXT Graphic Conversion Helpers 0.10",true) messages.help = [[ --convertall convert all graphics on path diff --git a/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua b/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua index dd319047590..74012ae38d3 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-fonts.lua @@ -15,30 +15,81 @@ dofile(resolvers.find_file("font-mis.lua","tex")) scripts = scripts or { } scripts.fonts = scripts.fonts or { } -function scripts.fonts.reload(verbose) - fonts.names.load(true,verbose) -end - -function scripts.fonts.names(name) - name = name or "luatex-fonts-names.lua" +function fonts.names.simple() + local simpleversion = 1.001 + local simplelist = { "ttf", "otf", "ttc", "dfont" } + local name = "luatex-fonts-names.lua" + fonts.names.filters.list = simplelist + fonts.names.version = simpleversion -- this number is the same as in font-dum.lua + logs.report("fontnames","generating font database for 'luatex-fonts' version %s",fonts.names.version) fonts.names.identify(true) local data = fonts.names.data if data then - data.fallback_mapping = nil + local simplemappings = { } + local simplified = { + mappings = simplemappings, + version = simpleversion, + } + local specifications = data.specifications + for i=1,#simplelist do + local format = simplelist[i] + for tag, index in next, data.mappings[format] do + local s = specifications[index] + simplemappings[tag] = { s.rawname, s.filename, s.subfont } + end + end logs.report("fontnames","saving names in '%s'",name) - io.savedata(name,table.serialize(data,true)) + io.savedata(name,table.serialize(simplified,true)) + local data = io.loaddata(resolvers.find_file("font-dum.lua","tex")) + local dummy = string.match(data,"fonts%.names%.version%s*=%s*([%d%.]+)") + if tonumber(dummy) ~= simpleversion then + logs.report("fontnames","warning: version number %s in 'font-dum' does not match database version number %s",dummy or "?",simpleversion) + end elseif lfs.isfile(name) then os.remove(name) end end -local function showfeatures(v,n,f,s,t) - logs.simple("fontname: %s",v) - logs.simple("fullname: %s",n) - logs.simple("filename: %s",f) - local features = fonts.get_features(f,t) +function scripts.fonts.reload() + if environment.argument("simple") then + fonts.names.simple() + else + fonts.names.load(true) + end +end + +local function subfont(sf) + if sf then + return string.format("index: % 2s", sf) + else + return "" + end +end + +local function fontweight(fw) + if fw then + return string.format("conflict: %s", fw) + else + return "" + end +end + +local function showfeatures(tag,specification) + logs.simple("mapping : %s",tag) + logs.simple("fontname: %s",specification.fontname) + logs.simple("fullname: %s",specification.fullname) + logs.simple("filename: %s",specification.filename) + logs.simple("family : %s",specification.familyname or "") + logs.simple("weight : %s",specification.weight or "") + logs.simple("style : %s",specification.style or "") + logs.simple("width : %s",specification.width or "") + logs.simple("variant 
: %s",specification.variant or "") + logs.simple("subfont : %s",subfont(specification.subfont)) + logs.simple("fweight : %s",fontweight(specification.fontweight)) + -- maybe more + local features = fonts.get_features(specification.filename,specification.format) if features then - for what, v in table.sortedpairs(features) do + for what, v in table.sortedhash(features) do local data = features[what] if data and next(data) then logs.simple() @@ -46,9 +97,9 @@ local function showfeatures(v,n,f,s,t) logs.simple() logs.simple("feature script languages") logs.simple() - for f,ff in table.sortedpairs(data) do + for f,ff in table.sortedhash(data) do local done = false - for s, ss in table.sortedpairs(ff) do + for s, ss in table.sortedhash(ff) do if s == "*" then s = "all" end if ss ["*"] then ss["*"] = nil ss.all = true end if done then @@ -61,56 +112,146 @@ local function showfeatures(v,n,f,s,t) end end end + else + logs.simple() + logs.simple("no features") + logs.simple() end logs.reportline() end -function scripts.fonts.list(pattern,reload,all,info) +local function reloadbase(reload) if reload then logs.simple("fontnames, reloading font database") - end - -- make a function for this - pattern = pattern:lower() - pattern = pattern:gsub("%-","%%-") - pattern = pattern:gsub("%.","%%.") - pattern = pattern:gsub("%*",".*") - pattern = pattern:gsub("%?",".?") - if pattern == "" then - pattern = ".*" - else - pattern = "^" .. pattern .. "$" - end - -- - local t = fonts.names.list(pattern,reload) - if reload then + names.load(true) logs.simple("fontnames, done\n\n") end +end + +local function list_specifications(t,info) if t then - local s, w = table.sortedkeys(t), { 0, 0, 0 } - local function action(f) - for k,v in ipairs(s) do - local type, name, file, sub = unpack(t[v]) - f(v,name,file,sub,type) + local s = table.sortedkeys(t) + if info then + for k=1,#s do + local v = s[k] + showfeatures(v,t[v]) + end + else + for k=1,#s do + local v = s[k] + local entry = t[v] + s[k] = { + entry.familyname or "", + entry.weight or "", + entry.style or "", + entry.width or "", + entry.variant or "", + entry.fontname, + entry.filename, + subfont(entry.subfont), + fontweight(entry.fontweight), + } + e[k] = entry + end + table.formatcolumns(s) + for k=1,#s do + local v = s[k] + texio.write_nl(v) end end - action(function(v,n,f,s,t) - if #v > w[1] then w[1] = #v end - if #n > w[2] then w[2] = #n end - if #f > w[3] then w[3] = #f end - end) - action(function(v,n,f,s,t) - if s then s = "(sub)" else s = "" end - if info then - showfeatures(v,n,f,s,t) - else - local str = string.format("%s %s %s %s",v:padd(w[1]," "),n:padd(w[2]," "),f:padd(w[3]," "), s) - print(str:strip()) + end +end + +local function list_matches(t,info) + if t then + local s, w = table.sortedkeys(t), { 0, 0, 0 } + if info then + for k=1,#s do + local v = s[k] + showfeatures(v,t[v]) end - end) + else + for k=1,#s do + local v = s[k] + local entry = t[v] + s[k] = { + v, + entry.fontname, + entry.filename, + subfont(entry.subfont) + } + end + table.formatcolumns(s) + for k=1,#s do + texio.write_nl(s[k]) + end + end end end -function scripts.fonts.save(name,sub) +function scripts.fonts.list() + + local all = environment.argument("all") + local info = environment.argument("info") + local reload = environment.argument("reload") + local pattern = environment.argument("pattern") + local filter = environment.argument("filter") + local given = environment.files[1] + + reloadbase(reload) + + if environment.argument("name") then + if pattern then + --~ mtxrun 
--script font --list --name --pattern=*somename* + list_matches(fonts.names.list(string.topattern(pattern,true),reload,all),info) + elseif filter then + logs.report("fontnames","not supported: --list --name --filter",name) + elseif given then + --~ mtxrun --script font --list --name somename + list_matches(fonts.names.list(given,reload,all),info) + else + logs.report("fontnames","not supported: --list --name ",name) + end + elseif environment.argument("spec") then + if pattern then + --~ mtxrun --script font --list --spec --pattern=*somename* + logs.report("fontnames","not supported: --list --spec --pattern",name) + elseif filter then + --~ mtxrun --script font --list --spec --filter="fontname=somename" + list_specifications(fonts.names.getlookups(filter),info) + elseif given then + --~ mtxrun --script font --list --spec somename + list_specifications(fonts.names.collectspec(given,reload,all),info) + else + logs.report("fontnames","not supported: --list --spec ",name) + end + elseif environment.argument("file") then + if pattern then + --~ mtxrun --script font --list --file --pattern=*somename* + list_specifications(fonts.names.collectfiles(string.topattern(pattern,true),reload,all),info) + elseif filter then + logs.report("fontnames","not supported: --list --spec",name) + elseif given then + --~ mtxrun --script font --list --file somename + list_specifications(fonts.names.collectfiles(given,reload,all),info) + else + logs.report("fontnames","not supported: --list --file ",name) + end + elseif pattern then + --~ mtxrun --script font --list --pattern=*somename* + list_matches(fonts.names.list(string.topattern(pattern,true),reload,all),info) + elseif given then + --~ mtxrun --script font --list somename + list_matches(fonts.names.list(given,reload,all),info) + else + logs.report("fontnames","not supported: --list ",name) + end + +end + +function scripts.fonts.save() + local name = environment.files[1] or "" + local sub = environment.files[2] or "" local function save(savename,fontblob) if fontblob then savename = savename:lower() .. ".lua" @@ -128,12 +269,15 @@ function scripts.fonts.save(name,sub) if fontinfo then logs.simple("font: %s located as %s",name,filename) if fontinfo[1] then - for _, v in ipairs(fontinfo) do + for k=1,#fontinfo do + local v = fontinfo[k] save(v.fontname,fontloader.open(filename,v.fullname)) end else save(fontinfo.fullname,fontloader.open(filename)) end + else + logs.simple("font: %s cannot be read",filename) end else logs.simple("font: %s not saved",filename) @@ -141,35 +285,61 @@ function scripts.fonts.save(name,sub) else logs.simple("font: %s not found",name) end + else + logs.simple("font: no name given") end end -logs.extendbanner("Font Tools 0.20",true) +logs.extendbanner("ConTeXt Font Database Management 0.21",true) messages.help = [[ ---reload generate new font database ---list [--info] list installed fonts (show info) --save save open type font in raw table ---names generate 'luatex-fonts-names.lua' (not for context!) ---pattern=str filter files ---all provide alternatives +--reload generate new font database +--reload --simple generate 'luatex-fonts-names.lua' (not for context!) 
+ +--list --name list installed fonts, filter by name [--pattern] +--list --spec list installed fonts, filter by spec [--filter] +--list --file list installed fonts, filter by file [--pattern] + +--pattern=str filter files using pattern +--filter=list key-value pairs +--all show all found instances +--info give more details +--track=list enable trackers + +examples of searches: + +mtxrun --script font --list somename (== --pattern=*somename*) + +mtxrun --script font --list --name somename +mtxrun --script font --list --name --pattern=*somename* + +mtxrun --script font --list --spec somename +mtxrun --script font --list --spec somename-bold-italic +mtxrun --script font --list --spec --pattern=*somename* +mtxrun --script font --list --spec --filter="fontname=somename" +mtxrun --script font --list --spec --filter="familyname=somename,weight=bold,style=italic,width=condensed" + +mtxrun --script font --list --file somename +mtxrun --script font --list --file --pattern=*somename* ]] -if environment.argument("reload") then - scripts.fonts.reload(true) -elseif environment.argument("names") then - scripts.fonts.names() -elseif environment.argument("list") then - local pattern = environment.argument("pattern") or environment.files[1] or "" - local all = environment.argument("all") - local info = environment.argument("info") - local reload = environment.argument("reload") - scripts.fonts.list(pattern,reload,all,info) +local track = environment.argument("track") + +if track then trackers.enable(track) end + +if environment.argument("names") then + environment.setargument("reload",true) + environment.setargument("simple",true) +end + +if environment.argument("list") then + scripts.fonts.list() +elseif environment.argument("reload") then + scripts.fonts.reload() elseif environment.argument("save") then - local name = environment.files[1] or "" - local sub = environment.files[2] or "" - scripts.fonts.save(name,sub) + scripts.fonts.save() else logs.help(messages.help) end diff --git a/Master/texmf-dist/scripts/context/lua/mtx-grep.lua b/Master/texmf-dist/scripts/context/lua/mtx-grep.lua index a6617d711f7..9604bc9f85d 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-grep.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-grep.lua @@ -70,7 +70,9 @@ function scripts.grep.find(pattern, files, offset) end local capture = (content/check)^0 for i=offset or 1, #files do - for _, nam in ipairs(dir.glob(files[i])) do + local globbed = dir.glob(files[i]) + for i=1,#globbed do + local nam = globbed[i] name = nam local data = io.loaddata(name) if data then diff --git a/Master/texmf-dist/scripts/context/lua/mtx-interface.lua b/Master/texmf-dist/scripts/context/lua/mtx-interface.lua index 264a2dbe4f4..730a030d930 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-interface.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-interface.lua @@ -18,7 +18,8 @@ local messageinterfaces = { 'en','cs','de','it','nl','ro','fr','pe','no' } function flushers.scite(interface,collection) local result, i = {}, 0 result[#result+1] = format("keywordclass.macros.context.%s=",interface) - for _, command in ipairs(collection) do + for i=1,#collection do + local command = collection[i] if i==0 then result[#result+1] = "\\\n" i = 5 @@ -38,7 +39,8 @@ function flushers.jedit(interface,collection) result[#result+1] = "" result[#result+1] = "\t" result[#result+1] = "\t\t" - for _, command in ipairs(collection) do + for i=1,#collection do + local command = collection[i] result[#result+1] = format("\t\t\t%s",command) end 
result[#result+1] = "\t\t" @@ -52,7 +54,8 @@ function flushers.bbedit(interface,collection) result[#result+1] = "" result[#result+1] = "BBLMKeywordList" result[#result+1] = "" - for _, command in ipairs(collection) do + for i=1,#collection do + local command = collection[i] result[#result+1] = format("\t\\%s",command) end result[#result+1] = "" @@ -60,7 +63,8 @@ function flushers.bbedit(interface,collection) end function flushers.raw(interface,collection) - for _, command in ipairs(collection) do + for i=1,#collection do + local command = collection[i] logs.simple(command) end end @@ -74,7 +78,8 @@ function scripts.interface.editor(editor) if xmlfile == "" then logs.simple("unable to locate cont-en.xml") end - for _, interface in ipairs(interfaces) do + for i=1,#interfaces do + local interface = interfaces[i] local keyfile = resolvers.find_file(format("keys-%s.xml",interface)) or "" if keyfile == "" then logs.simple("unable to locate keys-*.xml") @@ -138,7 +143,7 @@ function scripts.interface.check() end function scripts.interface.context() - local filename = resolvers.find_file("mult-def.lua") or "" + local filename = resolvers.find_file(environment.files[1] or "mult-def.lua") or "" if filename ~= "" then local interface = dofile(filename) if interface and next(interface) then @@ -150,7 +155,9 @@ function scripts.interface.context() texresult[#texresult+1] = format("%% definitions for interface %s for language %s\n%%",what,language) xmlresult[#xmlresult+1] = format("\t\n",what,language) xmlresult[#xmlresult+1] = format("\t",what) - for _, key in ipairs(table.sortedkeys(t)) do + local sorted = table.sortedkeys(t) + for i=1,#sorted do + local key = sorted[i] local v = t[key] local value = v[language] or v["en"] if not value then @@ -178,7 +185,7 @@ function scripts.interface.context() return a .. b .. c .. 
b end) end - for language, _ in pairs(commands.setuplayout) do + for language, _ in next, commands.setuplayout do local texresult, xmlresult = { }, { } texresult[#texresult+1] = format("%% this file is auto-generated, don't edit this file\n%%") xmlresult[#xmlresult+1] = format("\n",tag) @@ -213,10 +220,11 @@ function scripts.interface.context() end function scripts.interface.messages() - local filename = resolvers.find_file("mult-mes.lua") or "" + local filename = resolvers.find_file(environment.files[1] or "mult-mes.lua") or "" if filename ~= "" then local messages = dofile(filename) - for _, interface in ipairs(messageinterfaces) do + for i=1,#messageinterfaces do + local interface = messageinterfaces[i] local texresult = { } for category, data in next, messages do for tag, message in next, data do @@ -234,7 +242,7 @@ function scripts.interface.messages() end end -logs.extendbanner("Interface Tools 0.11",true) +logs.extendbanner("ConTeXt Interface Related Goodies 0.11",true) messages.help = [[ --scite generate scite interface diff --git a/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua b/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua index f8c871a7b1c..4453e2ccb47 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-metatex.lua @@ -49,7 +49,7 @@ function scripts.metatex.timed(action) statistics.timed(action) end -logs.extendbanner("MetaTeX Tools 0.10",true) +logs.extendbanner("MetaTeX Process Management 0.10",true) messages.help = [[ --run process (one or more) files (default action) diff --git a/Master/texmf-dist/scripts/context/lua/mtx-modules.lua b/Master/texmf-dist/scripts/context/lua/mtx-modules.lua new file mode 100644 index 00000000000..3a348593f8e --- /dev/null +++ b/Master/texmf-dist/scripts/context/lua/mtx-modules.lua @@ -0,0 +1,167 @@ +if not modules then modules = { } end modules ['mtx-modules'] = { + version = 1.002, + comment = "companion to mtxrun.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +scripts = scripts or { } +scripts.modules = scripts.modules or { } + +-- Documentation can be woven into a source file. This script can generates +-- a file with the documentation and source fragments properly tagged. The +-- documentation is included as comment: +-- +-- %D ...... some kind of documentation +-- %M ...... macros needed for documenation +-- %S B begin skipping +-- %S E end skipping +-- +-- The generated file is structured as: +-- +-- \starttypen +-- \startmodule[type=suffix] +-- \startdocumentation +-- \stopdocumentation +-- \startdefinition +-- \stopdefinition +-- \stopmodule +-- \stoptypen +-- +-- Macro definitions specific to the documentation are not surrounded by +-- start-stop commands. The suffix specificaction can be overruled at runtime, +-- but defaults to the file extension. This specification can be used for language +-- depended verbatim typesetting. 
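+--
+-- A minimal sketch of the conversion described above (the macro name is
+-- made up for illustration only): a source fragment such as
+--
+--   %D This macro typesets a friendly greeting.
+--   \def\Greeting{Hello}
+--
+-- would come out roughly as
+--
+--   \startdocumentation
+--   This macro typesets a friendly greeting.
+--   \stopdocumentation
+--   \startdefinition
+--   \def\Greeting{Hello}
+--   \stopdefinition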
+ +local find, format, sub, is_empty, strip = string.find, string.format, string.sub, string.is_empty, string.strip + +local function source_to_ted(inpname,outname,filetype) + local inp = io.open(inpname) + if not inp then + logs.simple("unable to open '%s'",inpname) + return + end + local out = io.open(outname,"w") + if not out then + logs.simple("unable to open '%s'",outname) + return + end + logs.simple("converting '%s' to '%s'",inpname,outname) + local skiplevel, indocument, indefinition = 0, false, false + out:write(format("\\startmodule[type=%s]\n",filetype or file.suffix(inpname))) + for line in inp:lines() do +--~ line = strip(line) + if find(line,"^%%D ") or find(line,"^%%D$") then + if skiplevel == 0 then + local someline = (#line < 3 and "") or sub(line,4,#line) + if indocument then + out:write(format("%s\n",someline)) + else + if indefinition then + out:write("\\stopdefinition\n") + indefinition = false + end + if not indocument then + out:write("\n\\startdocumentation\n") + end + out:write(format("%s\n",someline)) + indocument = true + end + end + elseif find(line,"^%%M ") or find(line,"^%%M$") then + if skiplevel == 0 then + local someline = (#line < 3 and "") or sub(line,4,#line) + out:write(format("%s\n",someline)) + end + elseif find(line,"^%%S B") then + skiplevel = skiplevel + 1 + elseif find(line,"^%%S E") then + skiplevel = skiplevel - 1 + elseif find(line,"^%%") then + -- nothing + elseif skiplevel == 0 then + inlocaldocument = indocument + inlocaldocument = false + local someline = line + if indocument then + out:write("\\stopdocumentation\n") + indocument = false + end + if indefinition then + if is_empty(someline) then + out:write("\\stopdefinition\n") + indefinition = false + else + out:write(format("%s\n",someline)) + end + elseif not is_empty(someline) then + out:write("\n\\startdefinition\n") + indefinition = true + if inlocaldocument then + -- nothing + else + out:write(format("%s\n",someline)) + end + end + end + end + if indocument then + out:write("\\stopdocumentation\n") + end + if indefinition then + out:write("\\stopdefinition\n") + end + out:write("\\stopmodule\n") + out:close() + inp:close() + return true +end + +local suffixes = table.tohash { 'tex','mkii','mkiv','mp' } + +function scripts.modules.process(runtex) + local processed = { } + local prep = environment.argument("prep") + local files = environment.files + for i=1,#files do + local shortname = files[i] + local suffix = file.suffix(shortname) + if suffixes[suffix] then + local longname + if prep then + longname = shortname .. ".prep" + else + longname = file.removesuffix(shortname) .. "-" .. suffix .. 
".ted" + end + local done = source_to_ted(shortname,longname) + if done and runtex then + os.execute(format("mtxrun --script context --usemodule=mod-01 %s",longname)) + processed[#processed+1] = longname + end + end + end + for i=1,#processed do + local name = processed[i] + logs.simple("modules","processed: %s",name) + end +end + +-- context --ctx=m-modules.ctx xxx.mkiv + + +logs.extendbanner("ConTeXt Module Documentation Generators 1.00",true) + +messages.help = [[ +--convert convert source files (tex, mkii, mkiv, mp) to 'ted' files +--process process source files (tex, mkii, mkiv, mp) to 'pdf' files +--prep use original name with suffix 'prep' appended +]] + +if environment.argument("process") then + scripts.modules.process(true) +elseif environment.argument("convert") then + scripts.modules.process(false) +else + logs.help(messages.help) +end diff --git a/Master/texmf-dist/scripts/context/lua/mtx-mptopdf.lua b/Master/texmf-dist/scripts/context/lua/mtx-mptopdf.lua index 4243625adb9..342ff1c284a 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-mptopdf.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-mptopdf.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['mtx-mptopdf'] = { version = 1.303, - comment = "companion to mtxrun.lua", + comment = "companion to mtxrun.lua, patched by HH so errors are his", author = "Taco Hoekwater, Elvenkind BV, Dordrecht NL", copyright = "Elvenkind BV / ConTeXt Development Team", license = "see context related readme files" @@ -10,40 +10,30 @@ scripts = scripts or { } scripts.mptopdf = scripts.mptopdf or { } scripts.mptopdf.aux = scripts.mptopdf.aux or { } -do - -- setup functions and variables here +local dosish = os.type == 'windows' +local miktex = dosish and environment.TEXSYSTEM and environment.TEXSYSTEM:find("miktex") +local escapeshell = environment.SHELL and environment.SHELL:find("sh") and true - local dosish, miktex, escapeshell = false, false, false +function scripts.mptopdf.aux.find_latex(fname) + local d = io.loaddata(fname) or "" + return d:find("\\documentstyle") or d:find("\\documentclass") or d:find("\\begin{document}") +end - if os.platform == 'windows' then - dosish = true - if environment.TEXSYSTEM and environment.TEXSYSTEM:find("miktex") then - miktex = true +function scripts.mptopdf.aux.do_convert (fname) + local command, done, pdfdest = "", 0, "" + if fname:find(".%d+$") or fname:find("%.mps$") then + if miktex then + command = "pdftex -undump=mptopdf" + else + command = "pdftex -fmt=mptopdf -progname=context" end - end - if environment.SHELL and environment.SHELL:find("sh") then - escapeshell = true - end - - function scripts.mptopdf.aux.find_latex(fname) - local d = io.loaddata(fname) or "" - return d:find("\\documentstyle") or d:find("\\documentclass") or d:find("\\begin{document}") - end - - function scripts.mptopdf.aux.do_convert (fname) - local command, done, pdfdest = "", 0, "" - if fname:find(".%d+$") or fname:find("%.mps$") then - if miktex then - command = "pdftex -undump=mptopdf" - else - command = "pdftex -fmt=mptopdf -progname=context" - end - if dosish then - command = string.format('%s \\relax "%s"',command,fname) - else - command = string.format('%s \\\\relax "%s"',command,fname) - end - os.execute(command) + if dosish then + command = string.format('%s \\relax "%s"',command,fname) + else + command = string.format('%s \\\\relax "%s"',command,fname) + end + local result = os.execute(command) + if result == 0 then local name, suffix = file.nameonly(fname), file.extname(fname) local pdfsrc = name 
.. ".pdf" if lfs.isfile(pdfsrc) then @@ -55,29 +45,28 @@ do done = 1 end end - return done, pdfdest end + return done, pdfdest +end - function scripts.mptopdf.aux.make_mps(fn,latex,rawmp,metafun) - local rest, mpbin = latex and " --tex=latex " or " ", "" - if rawmp then - if metafun then - mpbin = "mpost --progname=mpost --mem=metafun" - else - mpbin = "mpost --mem=mpost" - end +function scripts.mptopdf.aux.make_mps(fn,latex,rawmp,metafun) + local rest, mpbin = latex and " --tex=latex " or " ", "" + if rawmp then + if metafun then + mpbin = "mpost --progname=mpost --mem=metafun" else - if latex then - mpbin = "mpost --mem=mpost" - else - mpbin = "texexec --mptex" - end + mpbin = "mpost --mem=mpost" end - local runner = mpbin .. rest .. fn - logs.simple("running: %s\n", runner) - return (os.execute(runner)) - end - + else + if latex then + mpbin = "mpost --mem=mpost" + else + mpbin = "texexec --mptex" + end + end + local runner = mpbin .. rest .. fn + logs.simple("running: %s\n", runner) + return (os.execute(runner)) end function scripts.mptopdf.convertall() @@ -97,7 +86,8 @@ function scripts.mptopdf.convertall() exit(1) end local report = { } - for _,fn in ipairs(files) do + for i=1,#files do + local fn = files[i] local success, name = scripts.mptopdf.aux.do_convert(fn) if success > 0 then report[#report+1] = { fn, name } @@ -106,11 +96,12 @@ function scripts.mptopdf.convertall() if #report > 0 then logs.simple("number of converted files: %i", #report) logs.simple("") - for _, r in ipairs(report) do + for i=1,#report do + local r = report[i] logs.simple("%s => %s", r[1], r[2]) end else - logs.simple("no input files match %s", table.concat(files,' ')) + logs.simple("no files are converted") end else logs.simple("no files match %s", table.concat(environment.files,' ')) diff --git a/Master/texmf-dist/scripts/context/lua/mtx-mtxworks.lua b/Master/texmf-dist/scripts/context/lua/mtx-mtxworks.lua new file mode 100644 index 00000000000..1239ae4c5b5 --- /dev/null +++ b/Master/texmf-dist/scripts/context/lua/mtx-mtxworks.lua @@ -0,0 +1,14 @@ +if not modules then modules = { } end modules ['mtx-mtxworks'] = { + version = 1.002, + comment = "companion to mtxrun.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this is a shortcut to "mtxrun --script texworks --start" + +environment.setargument("start",true) + +require "mtx-texworks" + diff --git a/Master/texmf-dist/scripts/context/lua/mtx-package.lua b/Master/texmf-dist/scripts/context/lua/mtx-package.lua index 06c89907adf..b36fc0ed89c 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-package.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-package.lua @@ -55,7 +55,7 @@ function scripts.package.merge_luatex_files(name,strip) end end -logs.extendbanner("Package Tools 0.1",true) +logs.extendbanner("Distribution Related Goodies 0.10",true) messages.help = [[ --merge merge 'loadmodule' into merge file diff --git a/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua b/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua index 7f130465bdf..293016991ff 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-patterns.lua @@ -14,25 +14,25 @@ scripts.patterns = scripts.patterns or { } scripts.patterns.list = { { "??", "hyph-ar.tex", "arabic" }, { "bg", "hyph-bg.tex", "bulgarian" }, --- { "ca", "hyph-ca.tex", "" }, + { "ca", "hyph-ca.tex", "catalan" }, { "??", "hyph-cop.tex", 
"coptic" }, { "cs", "hyph-cs.tex", "czech" }, - { "??", "hyph-cy.tex", "welsh" }, + { "cy", "hyph-cy.tex", "welsh" }, { "da", "hyph-da.tex", "danish" }, { "deo", "hyph-de-1901.tex", "german, old spelling" }, { "de", "hyph-de-1996.tex", "german, new spelling" }, --~ { "??", "hyph-el-monoton.tex", "" }, --~ { "??", "hyph-el-polyton.tex", "" }, ---~ { "agr", "hyph-grc", "ancient greek" }, + { "agr", "hyph-grc", "ancient greek" }, --~ { "???", "hyph-x-ibycus", "ancient greek in ibycus encoding" }, --~ { "gr", "", "" }, - { "??", "hyph-eo.tex", "esperanto" }, + { "eo", "hyph-eo.tex", "esperanto" }, { "gb", "hyph-en-gb.tex", "british english" }, { "us", "hyph-en-us.tex", "american english" }, { "es", "hyph-es.tex", "spanish" }, { "et", "hyph-et.tex", "estonian" }, { "eu", "hyph-eu.tex", "basque" }, -- ba is Bashkir! - { "??", "hyph-fa.tex", "farsi" }, + { "fa", "hyph-fa.tex", "farsi" }, { "fi", "hyph-fi.tex", "finnish" }, { "fr", "hyph-fr.tex", "french" }, -- { "??", "hyph-ga.tex", "" }, @@ -43,11 +43,11 @@ scripts.patterns.list = { { "hu", "hyph-hu.tex", "hungarian" }, { "??", "hyph-ia.tex", "interlingua" }, { "??", "hyph-id.tex", "indonesian" }, - { "??", "hyph-is.tex", "icelandic" }, + { "is", "hyph-is.tex", "icelandic" }, { "it", "hyph-it.tex", "italian" }, { "la", "hyph-la.tex", "latin" }, - { "??", "hyph-mn-cyrl.tex", "mongolian, cyrillic script" }, - { "??", "hyph-mn-cyrl-x-new.tex", "mongolian, cyrillic script (new patterns)" }, + { "lt", "hyph-lt.tex", "lithuanian" }, + { "mn", "hyph-mn-cyrl.tex", "mongolian, cyrillic script" }, { "nb", "hyph-nb.tex", "norwegian bokmål" }, { "nl", "hyph-nl.tex", "dutch" }, { "nn", "hyph-nn.tex", "norwegian nynorsk" }, @@ -55,13 +55,14 @@ scripts.patterns.list = { { "pt", "hyph-pt.tex", "portuguese" }, { "ro", "hyph-ro.tex", "romanian" }, { "ru", "hyph-ru.tex", "russian" }, - { "sk", "hyph-sk.tex", "" }, + { "sk", "hyph-sk.tex", "slovak" }, { "sl", "hyph-sl.tex", "slovenian" }, - { "??", "hyph-sr-cyrl.tex", "serbian" }, + { "sr", "hyph-sr-cyrl.tex", "serbian" }, { "sv", "hyph-sv.tex", "swedish" }, { "tr", "hyph-tr.tex", "turkish" }, + { "tk", "hyph-tk.tex", "turkman" }, { "uk", "hyph-uk.tex", "ukrainian" }, - { "??", "hyph-zh-latn.tex", "zh-latn, chinese Pinyin" }, + { "zh", "hyph-zh-latn.tex", "zh-latn, chinese Pinyin" }, } @@ -152,7 +153,7 @@ function scripts.patterns.load(path,name,mnemonic,fullcheck) end h.patterns = nil h.hyphenation = nil - for k, v in pairs(h) do + for k, v in next, h do if not permitted_commands[k] then okay = false end if mnemonic then logs.simple("command \\%s found in language %s, file %s, n=%s",k,mnemonic,name,v) @@ -161,7 +162,7 @@ function scripts.patterns.load(path,name,mnemonic,fullcheck) end end if not environment.argument("fast") then - for k, v in pairs(c) do + for k, v in next, c do if mnemonic then logs.simple("command \\%s found in comment of language %s, file %s, n=%s",k,mnemonic,name,v) else @@ -221,7 +222,7 @@ function scripts.patterns.load(path,name,mnemonic,fullcheck) end end local stripped = { } - for k, v in pairs(p) do + for k, v in next, p do if mnemonic then logs.simple("invalid character %s (0x%04X) in patterns of language %s, file %s, n=%s",char(k),k,mnemonic,name,v) else @@ -233,7 +234,7 @@ function scripts.patterns.load(path,name,mnemonic,fullcheck) stripped[k] = true end end - for k, v in pairs(h) do + for k, v in next, h do if mnemonic then logs.simple("invalid character %s (0x%04X) in exceptions of language %s, file %s, n=%s",char(k),k,mnemonic,name,v) else @@ -246,7 +247,7 @@ function 
scripts.patterns.load(path,name,mnemonic,fullcheck) end end local stripset = "" - for k, v in pairs(stripped) do + for k, v in next, stripped do logs.simple("entries that contain character %s will be omitted",char(k)) stripset = stripset .. "%" .. char(k) end @@ -292,8 +293,10 @@ end function scripts.patterns.check() local path = environment.argument("path") or "." local found = false - if #environment.files > 0 then - for _, name in ipairs(environment.files) do + local files = environment.files + if #files > 0 then + for i=1,#files do + local name = files[i] logs.simple("checking language file %s", name) local okay = scripts.patterns.load(path,name,nil,not environment.argument("fast")) if #environment.files > 1 then @@ -301,7 +304,7 @@ function scripts.patterns.check() end end else - for k, v in pairs(scripts.patterns.list) do + for k, v in next, scripts.patterns.list do local mnemonic, name = v[1], v[2] logs.simple("checking language %s, file %s", mnemonic, name) local okay = scripts.patterns.load(path,name,mnemonic,not environment.argument("fast")) @@ -322,7 +325,7 @@ function scripts.patterns.convert() if path == destination then logs.simple("source path and destination path should differ (use --path and/or --destination)") else - for k, v in pairs(scripts.patterns.list) do + for k, v in next, scripts.patterns.list do local mnemonic, name = v[1], v[2] logs.simple("converting language %s, file %s", mnemonic, name) local okay, patterns, hyphenations, comment, stripped, pused, hused = scripts.patterns.load(path,name,false) @@ -337,7 +340,7 @@ function scripts.patterns.convert() end end -logs.extendbanner("Pattern Tools 0.20",true) +logs.extendbanner("ConTeXt Pattern File Management 0.20",true) messages.help = [[ --convert generate context language files (mnemonic driven, if not given then all) @@ -360,3 +363,4 @@ end -- mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns -- mtxrun --script pattern --check --fast --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns -- mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns --destination=e:/tmp/patterns +-- mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/branches/luatex/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns diff --git a/Master/texmf-dist/scripts/context/lua/mtx-profile.lua b/Master/texmf-dist/scripts/context/lua/mtx-profile.lua index d99f7e926bc..11d48d0399b 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-profile.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-profile.lua @@ -54,9 +54,13 @@ function scripts.profiler.analyse(filename) f:close() print("") local loaded = { } - for _, filename in ipairs(table.sortedkeys(times)) do + local sortedtable.sortedkeys(times) + for i=1,#sorted do + local filename = sorted[i] local functions = times[filename] - for _, functionname in ipairs(table.sortedkeys(functions)) do + local sorted = table.sortedkeys(functions) + for i=1,#sorted do + local functionname = sorted[i] local totaltime = functions[functionname] local count = counts[functionname] totalcount = totalcount + count @@ -81,7 +85,9 @@ function scripts.profiler.analyse(filename) end end print("") - for _, call in ipairs(table.sortedkeys(calls)) do + local sorted = table.sortedkeys(calls) + for i=1,#sorted do + local call = sorted[i] local n = calls[call] totalcalls = totalcalls + n if n > callthreshold then @@ -95,7 +101,7 @@ 
function scripts.profiler.analyse(filename) end end -function scripts.profiler.analyse(filename) +function scripts.profiler.x_analyse(filename) local f = io.open(filename) local calls = { } local lines = 0 @@ -148,7 +154,7 @@ end --~ scripts.profiler.analyse("t:/manuals/mk/mk-fonts-profile.lua") --~ scripts.profiler.analyse("t:/manuals/mk/mk-introduction-profile.lua") -logs.extendbanner("LuaTeX Profiler 1.00",true) +logs.extendbanner("ConTeXt MkIV LuaTeX Profiler 1.00",true) messages.help = [[ --analyse analyse lua calls diff --git a/Master/texmf-dist/scripts/context/lua/mtx-scite.lua b/Master/texmf-dist/scripts/context/lua/mtx-scite.lua new file mode 100644 index 00000000000..d5f0a5344e5 --- /dev/null +++ b/Master/texmf-dist/scripts/context/lua/mtx-scite.lua @@ -0,0 +1,166 @@ +if not modules then modules = { } end modules ['mtx-scite'] = { + version = 1.001, + comment = "companion to mtxrun.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: append to global properties else order of loading problem +-- linux problem ... files are under root protection so we need --install + +scripts = scripts or { } +scripts.scite = scripts.scite or { } + +local scitesignals = { "scite-context.rme", "context.properties" } +local screenfont = "lmtypewriter10-regular.ttf" + +function scripts.scite.start(indeed) + local usedsignal, datapath, fullname, workname, userpath, fontpath + if os.type == "windows" then + workname = "scite.exe" + userpath = os.getenv("USERPROFILE") or "" + fontpath = os.getenv("SYSTEMROOT") + fontpath = (fontpath and file.join(fontpath,"fonts")) or "" + else + workname = "scite" + userpath = os.getenv("HOME") or "" + fontpath = "" + end + local binpaths = file.split_path(os.getenv("PATH")) or file.split_path(os.getenv("path")) + for i=1,#scitesignals do + local scitesignal = scitesignals[i] + local scitepath = resolvers.find_file(scitesignal,"other text files") or "" + if scitepath ~= "" then + scitepath = file.dirname(scitepath) -- data + if scitepath == "" then + scitepath = resolvers.clean_path(lfs.currentdir()) + else + usedsignal, datapath = scitesignal, scitepath + break + end + end + end + if not datapath or datapath == "" then + logs.simple("invalid datapath, maybe you need to regenerate the file database") + return false + end + if not binpaths or #binpaths == 0 then + logs.simple("invalid binpath") + return false + end + for i=1,#binpaths do + local p = file.join(binpaths[i],workname) + if lfs.isfile(p) and lfs.attributes(p,"size") > 10000 then -- avoind stub + fullname = p + break + end + end + if not fullname then + logs.simple("unable to locate %s",workname) + return false + end + local properties = dir.glob(file.join(datapath,"*.properties")) + local luafiles = dir.glob(file.join(datapath,"*.lua")) + local extrafont = resolvers.find_file(screenfont,"truetype font") or "" + local pragmafound = dir.glob(file.join(datapath,"pragma.properties")) + if userpath == "" then + logs.simple("unable to figure out userpath") + return false + end + local verbose = environment.argument("verbose") + local tobecopied, logdata = { }, { } + local function check_state(fullname,newpath) + local basename = file.basename(fullname) + local destination = file.join(newpath,basename) + local pa, da = lfs.attributes(fullname), lfs.attributes(destination) + if not da then + logdata[#logdata+1] = { "new : %s", basename } + tobecopied[#tobecopied+1] = { fullname, destination } + 
elseif pa.modification > da.modification then + logdata[#logdata+1] = { "outdated : %s", basename } + tobecopied[#tobecopied+1] = { fullname, destination } + else + logdata[#logdata+1] = { "up to date : %s", basename } + end + end + for i=1,#properties do + check_state(properties[i],userpath) + end + for i=1,#luafiles do + check_state(luafiles[i],userpath) + end + if fontpath ~= "" then + check_state(extrafont,fontpath) + end + local userpropfile = "SciTEUser.properties" + if os.name ~= "windows" then + userpropfile = "." .. userpropfile + end + local fullpropfile = file.join(userpath,userpropfile) + local userpropdata = io.loaddata(fullpropfile) or "" + local propfiledone = false + if pragmafound then + if userpropdata == "" then + logdata[#logdata+1] = { "error : no user properties found on '%s'", fullpropfile } + elseif string.find(userpropdata,"import *pragma") then + logdata[#logdata+1] = { "up to date : 'import pragma' in '%s'", userpropfile } + else + logdata[#logdata+1] = { "yet unset : 'import pragma' in '%s'", userpropfile } + userproperties = userpropdata .. "\n\nimport pragma\n\n" + propfiledone = true + end + else + if string.find(userpropdata,"import *context") then + logdata[#logdata+1] = { "up to date : 'import context' in '%s'", userpropfile } + else + logdata[#logdata+1] = { "yet unset : 'import context' in '%s'", userpropfile } + userproperties = userpropdata .. "\n\nimport context\n\n" + propfiledone = true + end + end + if not indeed or verbose then + logs.simple("used signal: %s", usedsignal) + logs.simple("data path : %s", datapath) + logs.simple("full name : %s", fullname) + logs.simple("user path : %s", userpath) + logs.simple("extra font : %s", extrafont) + end + if #logdata > 0 then + logs.simple("") + for k=1,#logdata do + local v = logdata[k] + logs.simple(v[1],v[2]) + end + end + if indeed then + if #tobecopied > 0 then + logs.simple("warning : copying updated files") + for i=1,#tobecopied do + local what = tobecopied[i] + logs.simple("copying : '%s' => '%s'",what[1],what[2]) + file.copy(what[1],what[2]) + end + end + if propfiledone then + logs.simple("saving : '%s'",userpropfile) + io.savedata(fullpropfile,userpropdata) + end + os.launch(fullname) + end +end + +logs.extendbanner("Scite Startup Script 1.00",true) + +messages.help = [[ +--start [--verbose] start scite +--test report what will happen +]] + +if environment.argument("start") then + scripts.scite.start(true) +elseif environment.argument("test") then + scripts.scite.start() +else + logs.help(messages.help) +end diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua index f593c2e351d..b2a993bf836 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-fonttest.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['mtx-server-ctx-fonttest'] = { license = "see context related readme files" } -dofile(resolvers.find_file("l-aux.lua","tex")) +--~ dofile(resolvers.find_file("l-aux.lua","tex")) dofile(resolvers.find_file("trac-lmx.lua","tex")) dofile(resolvers.find_file("font-ott.lua","tex")) dofile(resolvers.find_file("font-syn.lua","tex")) @@ -21,8 +21,11 @@ local temppath = caches.setpath("temp","mtx-server-ctx-fonttest") local basename = "mtx-server-ctx-fonttest-data.lua" local basepath = temppath -for _, suffix in ipairs { "tex", "pdf", "log" } do - os.remove(file.join(temppath,file.addsuffix(tempname,suffix))) +local 
remove_suffixes = { "tex", "pdf", "log" } +local what_options = { "trace", "basemode" } + +for i=1,#remove_suffixes do + os.remove(file.join(temppath,file.addsuffix(tempname,remove_suffixes[i]))) end local process_templates = { } @@ -30,7 +33,7 @@ local process_templates = { } process_templates.default = [[ \starttext \setcharactermirroring[1] - \definefontfeature[sample][%s] + \definefontfeature[sample][analyse=yes,%s] \definedfont[name:%s*sample] \startTEXpage[offset=3pt] \detokenize{%s} @@ -147,9 +150,10 @@ local cache = { } local function showfeatures(f) if f then + logs.simple("processing font '%s'",f) local features = cache[f] if features == nil then - features = fonts.get_features(f) + features = fonts.get_features(resolvers.find_file(f)) if not features then logs.simple("building cache for '%s'",f) io.savedata(file.join(temppath,file.addsuffix(tempname,"tex")),format(process_templates.cache,f,f)) @@ -166,18 +170,18 @@ local function showfeatures(f) local function show(what) local data = features[what] if data and next(data) then - for f,ff in pairs(data) do + for f,ff in next, data do if find(f,"<") then -- ignore aat for the moment else fea[f] = true - for s, ss in pairs(ff) do + for s, ss in next, ff do if find(s,"%*") then -- ignore * else scr[s] = true local rs = rev[s] if not rs then rs = {} rev[s] = rs end - for k, l in pairs(ss) do + for k, l in next, ss do if find(k,"%*") then -- ignore * else @@ -192,16 +196,16 @@ local function showfeatures(f) end end end - for what, v in table.sortedpairs(features) do + for what, v in table.sortedhash(features) do show(what) end local stupid = { } stupid[#stupid+1] = "var feature_hash = new Array ;" - for s, sr in pairs(rev) do + for s, sr in next, rev do stupid[#stupid+1] = format("feature_hash['%s'] = new Array ;",s) - for l, lr in pairs(sr) do + for l, lr in next, sr do stupid[#stupid+1] = format("feature_hash['%s']['%s'] = new Array ;",s,l) - for f, fr in pairs(lr) do + for f, fr in next, lr do stupid[#stupid+1] = format("feature_hash['%s']['%s']['%s'] = true ;",s,l,f) end end @@ -217,25 +221,50 @@ local function showfeatures(f) end end +local template_h = [[ + + safe name     + family name     + style-variant-weight-width     + font name     + weight     + filename +]] + +local template_d = [[ + + %s     + %s     + %s-%s-%s-%s     + %s     + %s     + %s +]] + local function select_font() - local t = fonts.names.list(".*") + local t = fonts.names.list(".*",false,true) if t then local listoffonts = { } - local t = fonts.names.list(".*") - if t then - listoffonts[#listoffonts+1] = "" - listoffonts[#listoffonts+1] = "" - for k, id in ipairs(table.sortedkeys(t)) do - local ti = t[id] - local type, name, file, sub = ti[1], ti[2], ti[3], ti[4] - if type == "otf" or type == "ttf" or type == "ttc" then - if sub then sub = "(sub)" else sub = "" end - listoffonts[#listoffonts+1] = format("",id,id,name,file,sub,type) - end + listoffonts[#listoffonts+1] = "
safe namefont namefilenamesub font type
%s%s%s%s%s
" + listoffonts[#listoffonts+1] = template_h + for k, v in table.sortedhash(t) do + local kind = v.format + if kind == "otf" or kind == "ttf" or kind == "ttc" then + local fontname = v.fontname + listoffonts[#listoffonts+1] = format(template_d, fontname, fontname, + v.familyname or "", + t.variant or "normal", + t.weight or "normal", + t.width or "normal", + t.style or "normal", + v.rawname or fontname, + v.fontweight or "", + v.filename or "" + ) end - listoffonts[#listoffonts+1] = "
" - return concat(listoffonts,"\n") end + listoffonts[#listoffonts+1] = "" + return concat(listoffonts,"\n") end return "no fonts" end @@ -260,65 +289,80 @@ local result_template = [[ scripts.webserver.registerpath(temppath) +local function get_specification(name) + return fonts.names.resolvedspecification(name or "") +end + local function edit_font(currentfont,detail,tempname) - local fontname, fontfile, issub = fonts.names.specification(currentfont or "") - local htmldata = showfeatures(fontfile) - if htmldata then - local features, languages, scripts, options = { }, { }, { }, { } - for k,v in ipairs(table.sortedkeys(htmldata.scripts)) do - local s = fonts.otf.tables.scripts[v] or v - if detail and v == detail.script then - scripts[#scripts+1] = format(" %s",s,v,v,v,v) - else - scripts[#scripts+1] = format(" %s",s,v,v,v,v) + logs.simple("entering edit mode for '%s'",currentfont) + local specification = get_specification(currentfont) + if specification then + local htmldata = showfeatures(specification.filename) + if htmldata then + local features, languages, scripts, options = { }, { }, { }, { } + local sorted = table.sortedkeys(htmldata.scripts) + for k=1,#sorted do + local v = sorted[k] + local s = fonts.otf.tables.scripts[v] or v + if detail and v == detail.script then + scripts[#scripts+1] = format(" %s",s,v,v,v,v) + else + scripts[#scripts+1] = format(" %s",s,v,v,v,v) + end end - end - for k,v in ipairs(table.sortedkeys(htmldata.languages)) do - local l = fonts.otf.tables.languages[v] or v - if detail and v == detail.language then - languages[#languages+1] = format(" %s",l,v,v,v,v) - else - languages[#languages+1] = format(" %s",l,v,v,v,v) + local sorted = table.sortedkeys(htmldata.languages) + for k=1,#sorted do + local v = sorted[k] + local l = fonts.otf.tables.languages[v] or v + if detail and v == detail.language then + languages[#languages+1] = format(" %s",l,v,v,v,v) + else + languages[#languages+1] = format(" %s",l,v,v,v,v) + end end - end - for k,v in ipairs(table.sortedkeys(htmldata.features)) do - local f = fonts.otf.tables.features[v] or v - if detail and detail["f-"..v] then - features[#features+1] = format(" %s",f,v,v,v,v) - else - features[#features+1] = format(" %s",f,v,v,v,v) + local sorted = table.sortedkeys(htmldata.features) + for k=1,#sorted do + local v = sorted[k] + local f = fonts.otf.tables.features[v] or v + if detail and detail["f-"..v] then + features[#features+1] = format(" %s",f,v,v,v,v) + else + features[#features+1] = format(" %s",f,v,v,v,v) + end end - end - for k, v in ipairs { "trace", "basemode" } do - if detail and detail["o-"..v] then - options[#options+1] = format(" %s",v,v,v) + for k=1,#what_options do + local v = what_options[k] + if detail and detail["o-"..v] then + options[#options+1] = format(" %s",v,v,v) + else + options[#options+1] = format(" %s",v,v,v) + end + end + local e = format(edit_template, + (detail and detail.sampletext) or sample_line,(detail and detail.name) or "no name",(detail and detail.title) or "", + concat(scripts," "),concat(languages," "),concat(features," "),concat(options," ")) + if tempname then + local pdffile, texfile = file.addsuffix(tempname,"pdf"), file.addsuffix(tempname,"tex") + local r = format(result_template,pdffile,texfile,pdffile) + return e .. 
r, htmldata.javascript or "" else - options[#options+1] = format(" %s",v,v,v) + return e, htmldata.javascript or "" end - end - local e = format(edit_template, - (detail and detail.sampletext) or sample_line,(detail and detail.name) or "no name",(detail and detail.title) or "", - concat(scripts," "),concat(languages," "),concat(features," "),concat(options," ")) - if tempname then - local pdffile, texfile = file.addsuffix(tempname,"pdf"), file.addsuffix(tempname,"tex") - local r = format(result_template,pdffile,texfile,pdffile) - return e .. r, htmldata.javascript or "" else - return e, htmldata.javascript or "" + return "error, nothing set up yet" end else - return "error, nothing set up yet" + return "error, no info about font" end end local function process_font(currentfont,detail) -- maybe just fontname - local fontname, fontfile, issub = fonts.names.specification(currentfont or "") local features = { "mode=node", format("language=%s",detail.language or "dflt"), format("script=%s",detail.script or "dflt"), } - for k,v in pairs(detail) do + for k,v in next, detail do local f = match(k,"^f%-(.*)$") if f then features[#features+1] = format("%s=yes",f) @@ -361,25 +405,38 @@ local function show_log(currentfont,detail) end local function show_font(currentfont,detail) - local fontname, fontfile, issub = fonts.names.specification(currentfont or "") - local features = fonts.get_features(fontfile) + local specification = get_specification(currentfont) + local features = fonts.get_features(specification.filename) local result = { } result[#result+1] = format("

names

",what) result[#result+1] = "" - result[#result+1] = format("",currentfont) - result[#result+1] = format("",fontname) - result[#result+1] = format("",fontfile) + result[#result+1] = format("",currentfont) + result[#result+1] = format("",specification.fontname or "-") + result[#result+1] = format("",specification.fontfile or "-") + result[#result+1] = format("",specification.familyname or "-") + result[#result+1] = format("",specification.fontweight or "-") + result[#result+1] = format("",specification.format or "-") + result[#result+1] = format("",specification.fullname or "-") + result[#result+1] = format("",specification.subfamily or "-") + result[#result+1] = format("",specification.rawname or "-") + result[#result+1] = format("",specification.designsize or "-") + result[#result+1] = format("",specification.minsize or "-") + result[#result+1] = format("",specification.maxsize or "-") + result[#result+1] = format("",specification.style ~= "" and specification.style or "normal") + result[#result+1] = format("",specification.variant ~= "" and specification.variant or "normal") + result[#result+1] = format("",specification.weight ~= "" and specification.weight or "normal") + result[#result+1] = format("",specification.width ~= "" and specification.width or "normal") result[#result+1] = "
fontname:%s
fullname:%s
filename:%s
fontname: %s
fullname: %s
filename: %s
familyname: %s
fontweight: %s
format: %s
fullname: %s
subfamily: %s
rawname: %s
designsize: %s
minimumsize:%s
maximumsize:%s
style: %s
variant: %s
weight: %s
width: %s
" if features then - for what, v in table.sortedpairs(features) do + for what, v in table.sortedhash(features) do local data = features[what] if data and next(data) then result[#result+1] = format("

%s features

",what) result[#result+1] = "" result[#result+1] = "" - for f,ff in table.sortedpairs(data) do + for f,ff in table.sortedhash(data) do local done = false - for s, ss in table.sortedpairs(ff) do + for s, ss in table.sortedhash(ff) do if s == "*" then s = "all" end if ss ["*"] then ss["*"] = nil ss.all = true end if done then @@ -457,10 +514,10 @@ local function loadstored(detail,currentfont,name) detail.title = storage.title or detail.title detail.sampletext = storage.text or detail.sampletext detail.name = name or "no name" - for k,v in pairs(storage.features) do + for k,v in next, storage.features do detail["f-"..k] = v end - for k,v in pairs(storage.options) do + for k,v in next, storage.options do detail["o-"..k] = v end end @@ -486,19 +543,20 @@ local function deletestored(detail,currentfont,name) end local function save_font(currentfont,detail) - local fontname, fontfile, issub = fonts.names.specification(currentfont or "") + local specification = get_specification(currentfont) local name, title, script, language, features, options, text = currentfont, "", "dflt", "dflt", { }, { }, "" if detail then - local htmldata = showfeatures(fontfile) + local htmldata = showfeatures(specification.filename) script = detail.script or script language = detail.language or language text = string.strip(detail.sampletext or text) name = string.strip(detail.name or name) title = string.strip(detail.title or title) - for k,v in pairs(htmldata.features) do + for k,v in next, htmldata.features do if detail["f-"..k] then features[k] = true end end - for k,v in ipairs { "trace", "basemode" } do + for k=1,#what_options do + local v = what_options[k] if detail["o-"..v] then options[k] = true end end end @@ -518,8 +576,8 @@ local function load_font(currentfont) local storage = loadbase(datafile) local result = {} result[#result+1] = format("") - for k,v in table.sortedpairs(storage) do - local fontname, fontfile, issub = fonts.names.specification(v.font or "") + for k,v in table.sortedhash(storage) do + local fontname, fontfile = get_specification(v.font) result[#result+1] = format("", k,k,k,v.font,fontname,v.script,v.language,concat(table.sortedkeys(v.features)," "),v.title or "no title",v.text or "") end @@ -562,6 +620,13 @@ local status_template = [[ ]] +local variables = { + ['color-background-one'] = lmx.get('color-background-green'), + ['color-background-two'] = lmx.get('color-background-blue'), + ['title'] = 'ConTeXt Font Tester', + ['formaction'] = "mtx-server-ctx-fonttest.lua", +} + function doit(configuration,filename,hashed) local start = os.clock() @@ -589,65 +654,52 @@ function doit(configuration,filename,hashed) action = "extras" end - lmx.restore() - - local fontname, fontfile, issub = fonts.names.specification(currentfont or "") + local fontname, fontfile = get_specification(currentfont) if fontfile then - lmx.variables['title-default'] = format('ConTeXt Font Tester: %s (%s)',fontname,fontfile) + variables.title = format('ConTeXt Font Tester: %s (%s)',fontname,fontfile) else - lmx.variables['title-default'] = 'ConTeXt Font Tester' + variables.title = 'ConTeXt Font Tester' end - lmx.variables['color-background-green'] = '#4F6F6F' - lmx.variables['color-background-blue'] = '#6F6F8F' - lmx.variables['color-background-yellow'] = '#8F8F6F' - lmx.variables['color-background-purple'] = '#8F6F8F' - - lmx.variables['color-background-body'] = '#808080' - lmx.variables['color-background-main'] = '#3F3F3F' - lmx.variables['color-background-one'] = lmx.variables['color-background-green'] - 
lmx.variables['color-background-two'] = lmx.variables['color-background-blue'] - - lmx.variables['title'] = lmx.variables['title-default'] - - lmx.set('title', lmx.get('title')) - lmx.set('color-background-one', lmx.get('color-background-green')) - lmx.set('color-background-two', lmx.get('color-background-blue')) - -- lua table and adapt - lmx.set('formaction', "mtx-server-ctx-fonttest.lua") + local buttons = { 'process', 'select', 'save', 'load', 'edit', 'reset', 'features', 'source', 'log', 'info', 'extras'} + local menu = { } - local menu = { } - for k, v in ipairs { 'process', 'select', 'save', 'load', 'edit', 'reset', 'features', 'source', 'log', 'info', 'extras'} do - menu[#menu+1] = format("",v,v) + for i=1,#buttons do + local button = buttons[i] + menu[#menu+1] = format("",button,button) end - lmx.set('menu', concat(menu," ")) - logs.simple("action: %s",action or "no action") + variables.menu = concat(menu," ") + variables.status = format(status_template,currentfont or "") + variables.maintext = "" + variables.javascriptdata = "" + variables.javascripts = "" + variables.javascriptinit = "" - lmx.set("status",format(status_template,currentfont or "")) + logs.simple("action: %s",action or "no action") local result if action == "select" then - lmx.set('maintext',select_font()) + variables.maintext = select_font() elseif action == "info" then - lmx.set('maintext',info_about()) + variables.maintext = info_about() elseif action == "extras" then - lmx.set('maintext',do_extras()) + variables.maintext = do_extras() elseif currentfont and currentfont ~= "" then if action == "save" then - lmx.set('maintext',save_font(currentfont,detail)) + variables.maintext = save_font(currentfont,detail) elseif action == "load" then - lmx.set('maintext',load_font(currentfont,detail)) + variables.maintext = load_font(currentfont,detail) elseif action == "source" then - lmx.set('maintext',show_source(currentfont,detail)) + variables.maintext = show_source(currentfont,detail) elseif action == "log" then - lmx.set('maintext',show_log(currentfont,detail)) + variables.maintext = show_log(currentfont,detail) elseif action == "features" then - lmx.set('maintext',show_font(currentfont,detail)) + variables.maintext = show_font(currentfont,detail) else local e, s if action == "process" then @@ -659,16 +711,16 @@ function doit(configuration,filename,hashed) else e, s = process_font(currentfont,detail) end - lmx.set('maintext',e) - lmx.set('javascriptdata',s) - lmx.set('javascripts',javascripts) - lmx.set('javascriptinit', "check_form()") + variables.maintext = e + variables.javascriptdata = s + variables.javascripts = javascripts + variables.javascriptinit = "check_form()" end else - lmx.set('maintext',select_font()) + variables.maintext = select_font() end - result = { content = lmx.convert('context-fonttest.lmx') } + result = { content = lmx.convert('context-fonttest.lmx',false,variables) } logs.simple("time spent on page: %0.03f seconds",os.clock()-start) diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua index c53d9f6e0f2..2f072f97747 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-help.lua @@ -6,8 +6,10 @@ if not modules then modules = { } end modules ['mtx-server-ctx-help'] = { license = "see context related readme files" } ---~ dofile(resolvers.find_file("l-xml.lua","tex")) -dofile(resolvers.find_file("l-aux.lua","tex")) +-- todo in lua 
interface: noargument, oneargument, twoarguments, threearguments
+
+--~ dofile(resolvers.find_file("l-aux.lua","tex"))
+--~ dofile(resolvers.find_file("l-url.lua","tex"))
 dofile(resolvers.find_file("trac-lmx.lua","tex")) -- problem ... serialize parent stack
@@ -277,18 +279,23 @@ document.setups.translations = document.setups.translations or { }
 document.setups.formats = {
-    interface = [[%s]],
-    href = [[%s]],
-    source = [[%s]],
-    optional_single = "[optional string %s]",
-    optional_list = "[optional list %s]",
-    mandate_single = "[mandate string %s]",
-    mandate_list = "[mandate list %s]",
-    parameter = [[]],
-    parameters = [[
featuretag script languages 
del name font fontname script language features title sampletext 
x %s %s %s %s %s %s %s %s 
%s%s%s
%s
]],
-    listing = [[%s]],
-    special = "%s",
-    default = "%s",
+    open_command    = { [[\%s]], [[context.%s (]] },
+    close_command   = { [[]], [[ )]] },
+    connector       = { [[]], [[, ]] },
+    href_in_list    = { [[%s]], [[%s]] },
+    href_as_command = { [[\%s]], [[context.%s]] },
+    interface       = [[%s]],
+    source          = [[%s]],
+    modes           = { [[lua mode]], [[tex mode]] },
+    optional_single = { "[optional string %s]", "{optional string %s}" },
+    optional_list   = { "[optional list %s]", "{optional table %s}" } ,
+    mandate_single  = { "[mandate string %s]", "{mandate string %s}" },
+    mandate_list    = { "[mandate list %s]", "{mandate list %s}" },
+    parameter       = [[%s%s%s]],
+    parameters      = [[%s]],
+    listing         = [[%s]],
+    special         = [[%s]],
+    default         = [[%s]],
 }
 
 local function translate(tag,int,noformat)
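The format entries above now come in pairs, one rendering for the TeX syntax and one for the Lua (context.*) syntax; the reworked collect() further down picks between them with the lastmode index (apparently 1 for the TeX rendering and 2 for the Lua rendering, going by the open_command pair). A minimal standalone sketch of that selection, using an illustrative subset of the table and a made-up command name rather than the patched data itself:

-- sketch only: a reduced formats table in the shape used above,
-- indexed by mode (1 = tex syntax, 2 = lua syntax)
local formats = {
    open_command   = { [[\%s]], [[context.%s (]] },
    close_command  = { [[]], [[ )]] },
    connector      = { [[]], [[, ]] },
    mandate_single = { "[mandate string %s]", "{mandate string %s}" },
}

-- compose a command template roughly the way the rewritten collect() does
local function compose(name,nofarguments,lastmode)
    local sequence = { formats.open_command[lastmode]:format(name) }
    for n=1,nofarguments do
        local c = formats.connector[lastmode]
        if #sequence > 1 and c ~= "" then
            sequence[#sequence+1] = c
        end
        sequence[#sequence+1] = formats.mandate_single[lastmode]:format(n)
    end
    sequence[#sequence+1] = formats.close_command[lastmode]
    return table.concat(sequence," ")
end

print(compose("setupsomething",2,1)) -- roughly: \setupsomething [mandate string 1] [mandate string 2]
print(compose("setupsomething",2,2)) -- roughly: context.setupsomething ( {mandate string 1} , {mandate string 2} )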
@@ -298,7 +305,7 @@ local function translate(tag,int,noformat)
     if noformat then
         return ti[tag] or te[tag] or tag
     else
-        return document.setups.formats.special:format(ti[tag] or te[tag] or tag)
+        return format(document.setups.formats.special,ti[tag] or te[tag] or tag)
     end
 end
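The change above swaps the method call on the format string for a call through the file-local format alias; both produce the same result, as this small check illustrates (the special value here is a plain placeholder standing in for document.setups.formats.special):

local format = string.format
local special = "%s" -- placeholder, not the patched markup
print(special:format("command")) -- method call on the string
print(format(special,"command")) -- call through the local alias, as in the patch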
 
@@ -307,7 +314,7 @@ local function translated(e,int)
     local s = attributes.type or "?"
     local tag = s:match("^cd:(.*)$")
     if attributes.default == "yes" then
-        return document.setups.formats.default:format(tag)
+        return format(document.setups.formats.default,tag or "?")
     elseif tag then
         return translate(tag,int)
     else
@@ -318,7 +325,8 @@ end
 document.setups.loaded = document.setups.loaded or { }
 
 document.setups.current = { }
-document.setups.showsources = false
+document.setups.showsources = true
+document.setups.mode = 1
 
 function document.setups.load(filename)
     filename = resolvers.find_file(filename) or ""
@@ -358,16 +366,15 @@ end
 
 function document.setups.csname(ek,int)
     local cs = ""
-    local at = ek.at
+    local at = ek.at or { }
     if at.type == 'environment' then
         cs = translate("start",int,true) .. cs
     end
-    for r, d, k in xml.elements(ek,'cd:sequence/(cd:string|variable)') do
-        local dk = d[k]
-        if dk.tg == "string" then
-            cs = cs .. dk.at.value
+    for e in xml.collected(ek,'cd:sequence/(cd:string|variable)') do
+        if e.tg == "string" then
+            cs = cs .. e.at.value
         else
-            cs = cs .. dk.at.value -- to be translated
+            cs = cs .. e.at.value -- to be translated
         end
     end
     return cs
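The iteration style used above, xml.collected yielding the matched element directly instead of xml.elements yielding root, data and key, recurs in the following hunks. A self-contained sketch of the idiom with a stand-in iterator, since the real xml helpers live in the ConTeXt libraries:

-- stand-in for xml.collected: yields each matched element directly;
-- the element layout (tg for the tag, at for attributes) follows the
-- accesses made in the patched csname()
local function collected(list)
    local i = 0
    return function()
        i = i + 1
        return list[i]
    end
end

local elements = {
    { tg = "string",   at = { value = "setup" } },
    { tg = "variable", at = { value = "name"  } },
}

local cs = ""
for e in collected(elements) do
    cs = cs .. e.at.value -- no more d[k] indexing as with xml.elements
end
print(cs) -- setupname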
@@ -380,9 +387,8 @@ function document.setups.names()
         names = { }
         local name = document.setups.name
         local csname = document.setups.csname
-        for r, d, k in xml.elements(current.root,'cd:command') do
-            local dk = d[k]
-            names[#names+1] = { dk.at.name, csname(dk,int) }
+        for e in xml.collected(current.root,'cd:command') do
+            names[#names+1] = { e.at.name, csname(e,int) }
         end
         table.sort(names, function(a,b) return a[2]:lower() < b[2]:lower() end)
         current.names = names
@@ -403,8 +409,9 @@ end
 function document.setups.showused()
     local current = document.setups.current
     if current.root and next(current.used) then
-        for k,v in ipairs(table.sortedkeys(current.used)) do
-            xml.sprint(current.used[v])
+        local sorted = table.sortedkeys(current.used)
+        for i=1,#sorted do
+            xml.sprint(current.used[sorted[i]])
         end
     end
 end
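The same replacement of ipairs over table.sortedkeys by an explicit numeric loop shows up in showused(), showall() and several later hunks; a small standalone version of the idiom (sortedkeys is re-implemented here only to keep the snippet self-contained):

-- minimal stand-in for table.sortedkeys
local function sortedkeys(t)
    local keys = { }
    for k in next, t do
        keys[#keys+1] = k
    end
    table.sort(keys)
    return keys
end

local used = { beta = "two", alpha = "one", gamma = "three" }

-- numeric loop over the sorted keys, as in the rewritten showused()
local sorted = sortedkeys(used)
for i=1,#sorted do
    print(sorted[i], used[sorted[i]]) -- alpha one, beta two, gamma three
end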
@@ -412,12 +419,12 @@ function document.setups.showall()
     local current = document.setups.current
     if current.root then
         local list = { }
-        xml.each_element(current.root,"cd:command", function(r,d,t)
-            local ek = d[t]
-            list[document.setups.name(ek)] = ek
-        end )
-        for k,v in ipairs(table.sortedkeys(list)) do
-            xml.sprint(list[v])
+        for e in xml.collected(current.root,"cd:command") do
+            list[document.setups.name(e)] = e
+        end
+        local sorted = table.sortedkeys(list)
+        for i=1,#sorted do
+            xml.sprint(list[sorted[i]])
         end
     end
 end
@@ -431,46 +438,56 @@ function document.setups.resolve(name)
     end
 end
 
-function document.setups.collect(name,int)
+function document.setups.collect(name,int,lastmode)
     local current = document.setups.current
     local formats = document.setups.formats
-    local command = xml.filter(current.root,format("cd:command[@name='%s']",name))
+    local command = xml.filter(current.root,format("cd:command[@name='%s']/first()",name))
     if command then
-        local attributes = command.at
+        local attributes = command.at or { }
         local data = {
             command = command,
             category = attributes.category or "",
         }
         if document.setups.showsources then
-            data.source = (attributes.file and formats.source:format(attributes.file,attributes.file)) or ""
+            data.source = (attributes.file and formats.source:format(attributes.file,lastmode,attributes.file)) or ""
         else
             data.source = attributes.file or ""
         end
-        local sequence, n = { "\\" .. document.setups.csname(command,int) }, 0
-        local arguments = { }
+        local n, sequence, tags = 0, { }, { }
+        sequence[#sequence+1] = formats.open_command[lastmode]:format(document.setups.csname(command,int))
+        local arguments, tag = { }, ""
         for r, d, k in xml.elements(command,"(cd:keywords|cd:assignments)") do
             n = n + 1
             local attributes = d[k].at
+            if #sequence > 1 then
+                local c = formats.connector[lastmode]
+                if c ~= "" then
+                    sequence[#sequence+1] = c
+                end
+            end
             if attributes.optional == 'yes' then
                 if attributes.list == 'yes' then
-                    sequence[#sequence+1] = formats.optional_list:format(n)
+                    tag = formats.optional_list[lastmode]:format(n)
                 else
-                    sequence[#sequence+1] = formats.optional_single:format(n)
+                    tag = formats.optional_single[lastmode]:format(n)
                 end
             else
                 if attributes.list == 'yes' then
-                    sequence[#sequence+1] = formats.mandate_list:format(n)
+                    tag = formats.mandate_list[lastmode]:format(n)
                 else
-                    sequence[#sequence+1] = formats.mandate_single:format(n)
+                    tag = formats.mandate_single[lastmode]:format(n)
                 end
             end
+            sequence[#sequence+1] = tag
+            tags[#tags+1] = tag
         end
+        sequence[#sequence+1] = formats.close_command[lastmode]
         data.sequence = concat(sequence, " ")
         local parameters, n = { }, 0
         for r, d, k in xml.elements(command,"(cd:keywords|cd:assignments)") do
             n = n + 1
             if d[k].tg == "keywords" then
-                local left = sequence[n+1]
+                local left = tags[n]
                 local right = { }
                 for r, d, k in xml.elements(d[k],"(cd:constant|cd:resolve)") do
                     local tag = d[k].tg
@@ -488,13 +505,13 @@ function document.setups.collect(name,int)
                 end
                 parameters[#parameters+1] = formats.parameter:format(left,"",concat(right, ", "))
             else
-                local what = sequence[n+1]
+                local what = tags[n]
                 for r, d, k in xml.elements(d[k],"(cd:parameter|cd:inherit)") do
                     local tag = d[k].tg
                     local left, right = d[k].at.name or "?", { }
                     if tag == "inherit" then
                         local name = d[k].at.name or "?"
-                        local goto = document.setups.formats.href:format(name,"\\"..name)
+                        local goto = document.setups.formats.href_as_command[lastmode]:format(name,lastmode,name)
                         if #parameters > 0 and not parameters[#parameters]:find("
") then parameters[#parameters+1] = formats.parameter:format("
","","") end @@ -521,7 +538,8 @@ function document.setups.collect(name,int) end parameters[#parameters+1] = formats.parameter:format("
","","") end - data.parameters = parameters + data.parameters = parameters or { } + data.mode = formats.modes[lastmode or 1] return data else return nil @@ -532,25 +550,6 @@ end tex = tex or { } -lmx.variables['color-background-green'] = '#4F6F6F' -lmx.variables['color-background-blue'] = '#6F6F8F' -lmx.variables['color-background-yellow'] = '#8F8F6F' -lmx.variables['color-background-purple'] = '#8F6F8F' - -lmx.variables['color-background-body'] = '#808080' -lmx.variables['color-background-main'] = '#3F3F3F' -lmx.variables['color-background-main-left'] = '#3F3F3F' -lmx.variables['color-background-main-right'] = '#5F5F5F' -lmx.variables['color-background-one'] = lmx.variables['color-background-green'] -lmx.variables['color-background-two'] = lmx.variables['color-background-blue'] - -lmx.variables['title-default'] = 'ConTeXt Help Information' -lmx.variables['title'] = lmx.variables['title-default'] - -function lmx.loadedfile(filename) - return io.loaddata(resolvers.find_file(filename)) -- return resolvers.texdatablob(filename) -end - -- -- -- local interfaces = { @@ -564,7 +563,19 @@ local interfaces = { romanian = 'ro', } -local lastinterface, lastcommand, lastsource = "en", "", "" +local lastinterface, lastcommand, lastsource, lastmode = "en", "", "", 1 + +local variables = { + ['color-background-main-left'] = '#3F3F3F', + ['color-background-main-right'] = '#5F5F5F', + ['color-background-one'] = lmx.get('color-background-green'), + ['color-background-two'] = lmx.get('color-background-blue'), + ['title'] = 'ConTeXt Help Information', +} + +--~ function lmx.loadedfile(filename) +--~ return io.loaddata(resolvers.find_file(filename)) -- return resolvers.texdatablob(filename) +--~ end local function doit(configuration,filename,hashed) @@ -572,9 +583,12 @@ local function doit(configuration,filename,hashed) local start = os.clock() - local detail = aux.settings_to_hash(hashed.query or "") + local detail = url.query(hashed.query or "") - lastinterface, lastcommand, lastsource = detail.interface or lastinterface, detail.command or lastcommand, detail.source or lastsource + lastinterface = detail.interface or lastinterface + lastcommand = detail.command or lastcommand + lastsource = detail.source or lastsource + lastmode = tonumber(detail.mode or lastmode) or 1 if lastinterface then logs.simple("checking interface: %s",lastinterface) @@ -587,58 +601,61 @@ local function doit(configuration,filename,hashed) local result = { content = "error" } local names, refs, ints = document.setups.names(lastinterface), { }, { } - for k,v in ipairs(names) do - refs[k] = document.setups.formats.href:format(v[1],v[2]) + for k=1,#names do + local v = names[k] + refs[k] = formats.href_in_list[lastmode]:format(v[1],lastmode,v[2]) end - for k,v in ipairs(table.sortedkeys(interfaces)) do - ints[k] = document.setups.formats.interface:format(interfaces[v],v) + if lastmode ~= 2 then + local sorted = table.sortedkeys(interfaces) + for k=1,#sorted do + local v = sorted[k] + ints[k] = formats.interface:format(interfaces[v],lastmode,v) + end end - lmx.restore() - lmx.set('title', 'ConTeXt Help Information') - lmx.set('color-background-one', lmx.get('color-background-green')) - lmx.set('color-background-two', lmx.get('color-background-blue')) - local n = concat(refs,"
") local i = concat(ints,"

") if div then - lmx.set('names',div:format(n)) - lmx.set('interfaces',div:format(i)) + variables.names = div:format(n) + variables.interfaces = div:format(i) else - lmx.set('names', n) - lmx.set('interfaces', i) + variables.names = n + variables.interfaces = i end -- first we need to add information about mkii/mkiv + variables.maintitle = "no definition" + variables.maintext = "" + variables.extra = "" + if document.setups.showsources and lastsource and lastsource ~= "" then -- todo: mkii, mkiv, tex (can be different) local data = io.loaddata(resolvers.find_file(lastsource)) - lmx.set('maintitle', lastsource) - lmx.set('maintext', formats.listing:format(data)) + variables.maintitle = lastsource + variables.maintext = formats.listing:format(data) lastsource = "" elseif lastcommand and lastcommand ~= "" then - local data = document.setups.collect(lastcommand,lastinterface) + local data = document.setups.collect(lastcommand,lastinterface,lastmode) if data then - lmx.set('maintitle', data.sequence) - local extra = { } - for k, v in ipairs { "environment", "category", "source" } do + local what, extra = { "environment", "category", "source", "mode" }, { } + for k=1,#what do + local v = what[k] if data[v] and data[v] ~= "" then lmx.set(v, data[v]) extra[#extra+1] = v .. ": " .. data[v] end end - lmx.set('extra', concat(extra,", ")) - lmx.set('maintext', formats.parameters:format(concat(data.parameters))) + variables.maintitle = data.sequence + variables.maintext = formats.parameters:format(concat(data.parameters)) + variables.extra = concat(extra,"   ") else - lmx.set('maintext', "select command") + variables.maintext = "select command" end - else - lmx.set('maintext', "no definition") end - local content = lmx.convert('context-help.lmx') + local content = lmx.convert('context-help.lmx',false,variables) logs.simple("time spent on page: %0.03f seconds",os.clock()-start) diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-startup.lua b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-startup.lua index fcb757b3e60..59536c36cdb 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-startup.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-server-ctx-startup.lua @@ -10,25 +10,6 @@ dofile(resolvers.find_file("trac-lmx.lua","tex")) function doit(configuration,filename,hashed) - lmx.restore() - - lmx.variables['color-background-green'] = '#4F6F6F' - lmx.variables['color-background-blue'] = '#6F6F8F' - lmx.variables['color-background-yellow'] = '#8F8F6F' - lmx.variables['color-background-purple'] = '#8F6F8F' - - lmx.variables['color-background-body'] = '#808080' - lmx.variables['color-background-main'] = '#3F3F3F' - lmx.variables['color-background-one'] = lmx.variables['color-background-green'] - lmx.variables['color-background-two'] = lmx.variables['color-background-blue'] - - lmx.variables['title'] = "Overview Of Goodies" - - lmx.set('title', lmx.get('title')) - lmx.set('color-background-one', lmx.get('color-background-green')) - lmx.set('color-background-two', lmx.get('color-background-blue')) - - local list = { } local root = file.dirname(resolvers.find_file("mtx-server.lua") or ".") if root == "" then root = "." 
end @@ -42,11 +23,16 @@ function doit(configuration,filename,hashed) end end - lmx.set('maintext',table.concat(list,"\n")) - - result = { content = lmx.convert('context-base.lmx') } + local variables = { + ['color-background-one'] = lmx.get('color-background-green'), + ['color-background-two'] = lmx.get('color-background-blue'), + ['title'] = "Overview Of Goodies", + ['color-background-one'] = lmx.get('color-background-green'), + ['color-background-two'] = lmx.get('color-background-blue'), + ['maintext'] = table.concat(list,"\n"), + } - return result + return { content = lmx.convert('context-base.lmx',false,variables) } end diff --git a/Master/texmf-dist/scripts/context/lua/mtx-server.lua b/Master/texmf-dist/scripts/context/lua/mtx-server.lua index 615506ac0ce..dc0befcaa2f 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-server.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-server.lua @@ -252,7 +252,8 @@ function scripts.webserver.run(configuration) end -- locate root and index file in tex tree if not lfs.isdir(configuration.root) then - for _, name in ipairs(indices) do + for i=1,#indices do + local name = indices[i] local root = resolvers.resolve("path:" .. name) or "" if root ~= "" then configuration.root = root @@ -263,7 +264,8 @@ function scripts.webserver.run(configuration) end configuration.root = dir.expand_name(configuration.root) if not configuration.index then - for _, name in ipairs(indices) do + for i=1,#indices do + local name = indices[i] if lfs.isfile(file.join(configuration.root,name)) then configuration.index = name -- we will prepend the rootpath later break @@ -281,8 +283,11 @@ function scripts.webserver.run(configuration) logs.simple("scripts subpath: %s",configuration.scripts) logs.simple("context services: http://localhost:%s/mtx-server-ctx-startup.lua",configuration.port) local server = assert(socket.bind("*", configuration.port)) +--~ local reading = { server } while true do -- no multiple clients local start = os.clock() +--~ local input = socket.select(reading) +--~ local client = input:accept() local client = server:accept() client:settimeout(configuration.timeout or 60) local request, e = client:receive() @@ -323,7 +328,7 @@ function scripts.webserver.run(configuration) end end -logs.extendbanner("Simple Webserver 0.10") +logs.extendbanner("Simple Webserver For Helpers 0.10") messages.help = [[ --start start server diff --git a/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua b/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua index f525d5336e2..73ab846cd7f 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-texworks.lua @@ -20,29 +20,31 @@ local texworkspaths = { } local texworkssignal = "texworks-context.rme" -local texworkininame = "TeXworks.ini" +local texworkininame = "texworks.ini" function scripts.texworks.start(indeed) - local is_mswin = os.platform == "windows" - local workname = (is_mswin and "texworks.exe") or "TeXworks" + local workname = (os.type == "windows" and "texworks.exe") or "texworks" local fullname = nil local binpaths = file.split_path(os.getenv("PATH")) or file.split_path(os.getenv("path")) - local datapath = resolvers.find_file(texworkssignal,"other text files") or "" + local usedsignal = texworkssignal + local datapath = resolvers.find_file(usedsignal,"other text files") or "" if datapath ~= "" then datapath = file.dirname(datapath) -- data if datapath == "" then - datapath = resolvers.ownpath + datapath = resolvers.clean_path(lfs.currentdir()) end 
else - datapath = resolvers.find_file(texworkininame,"other text files") or "" + usedsignal = texworkininame + datapath = resolvers.find_file(usedsignal,"other text files") or "" if datapath == "" then - datapath = resolvers.find_file(string.lower(texworkininame),"other text files") or "" + usedsignal = string.lower(usedsignal) + datapath = resolvers.find_file(usedsignal,"other text files") or "" end if datapath ~= "" and lfs.isfile(datapath) then datapath = file.dirname(datapath) -- TUG datapath = file.dirname(datapath) -- data if datapath == "" then - datapath = resolvers.ownpath + datapath = resolvers.clean_path(lfs.currentdir()) end end end @@ -56,7 +58,7 @@ function scripts.texworks.start(indeed) end for i=1,#binpaths do local p = file.join(binpaths[i],workname) - if lfs.isfile(p) then + if lfs.isfile(p) and lfs.attributes(p,"size") > 10000 then -- avoind stub fullname = p break end @@ -65,22 +67,23 @@ function scripts.texworks.start(indeed) logs.simple("unable to locate %s",workname) return false end - for _, subpath in ipairs(texworkspaths) do - dir.makedirs(file.join(datapath,subpath)) + for i=1,#texworkspaths do + dir.makedirs(file.join(datapath,texworkspaths[i])) end os.setenv("TW_INIPATH",datapath) os.setenv("TW_LIBPATH",datapath) if not indeed or environment.argument("verbose") then - logs.simple("data path: %s", datapath) - logs.simple("full name: %s", fullname) + logs.simple("used signal: %s", usedsignal) + logs.simple("data path : %s", datapath) + logs.simple("full name : %s", fullname) + logs.simple("set paths : TW_INIPATH TW_LIBPATH") end if indeed then os.launch(fullname) end end - -logs.extendbanner("TeXworks startup script 1.0",true) +logs.extendbanner("TeXworks Startup Script 1.00",true) messages.help = [[ --start [--verbose] start texworks diff --git a/Master/texmf-dist/scripts/context/lua/mtx-timing.lua b/Master/texmf-dist/scripts/context/lua/mtx-timing.lua index 1dcb9aa0e0d..40e33cdaef0 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-timing.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-timing.lua @@ -55,15 +55,17 @@ local html_menu = [[ local directrun = true -function goodies.progress.make_svg(filename,other) +local what = { "parameters", "nodes" } + +function plugins.progress.make_svg(filename,other) local metadata, menudata, c = { }, { }, 0 metadata[#metadata+1] = 'outputformat := "svg" ;' - for _, kind in pairs { "parameters", "nodes" } do - local mdk = { } + for i=1,#what do + local kind, mdk = what[i], { } menudata[kind] = mdk - for n, name in pairs(goodies.progress[kind](filename)) do - local first = goodies.progress.path(filename,name) - local second = goodies.progress.path(filename,other) + for n, name in next, plugins.progress[kind](filename) do + local first = plugins.progress.path(filename,name) + local second = plugins.progress.path(filename,other) c = c + 1 metadata[#metadata+1] = format(meta,c,first,second) mdk[#mdk+1] = { name, c } @@ -88,18 +90,19 @@ function goodies.progress.make_svg(filename,other) end end -function goodies.progress.makehtml(filename,other,menudata,metadata) +function plugins.progress.makehtml(filename,other,menudata,metadata) local graphics = { } local result = { graphics = graphics } - for _, kind in pairs { "parameters", "nodes" } do + for i=1,#what do + local kind, menu = what[i], { } local md = menudata[kind] - local menu = { } result[kind] = menu - for k, v in ipairs(md) do + for k=1,#md do + local v = md[k] local name, number = v[1], v[2] - local min = goodies.progress.bot(filename,name) - local max = 
goodies.progress.top(filename,name) - local pages = goodies.progress.pages(filename) + local min = plugins.progress.bot(filename,name) + local max = plugins.progress.top(filename,name) + local pages = plugins.progress.pages(filename) local average = math.round(max/pages) if directrun then local data = metadata[number] @@ -119,55 +122,44 @@ function goodies.progress.makehtml(filename,other,menudata,metadata) return result end -function goodies.progress.valid_file(name) +function plugins.progress.valid_file(name) return name and name ~= "" and lfs.isfile(name .. "-luatex-progress.lut") end -function goodies.progress.make_lmx_page(name,launch,remove) +function plugins.progress.make_lmx_page(name,launch,remove) + local filename = name .. "-luatex-progress" local other = "elapsed_time" local template = 'context-timing.lmx' - lmx.variables['color-background-green'] = '#4F6F6F' - lmx.variables['color-background-blue'] = '#6F6F8F' - lmx.variables['color-background-yellow'] = '#8F8F6F' - lmx.variables['color-background-purple'] = '#8F6F8F' - - lmx.variables['color-background-body'] = '#808080' - lmx.variables['color-background-main'] = '#3F3F3F' - lmx.variables['color-background-one'] = lmx.variables['color-background-green'] - lmx.variables['color-background-two'] = lmx.variables['color-background-blue'] + plugins.progress.convert(filename) - lmx.variables['title-default'] = 'ConTeXt Timing Information' - lmx.variables['title'] = lmx.variables['title-default'] + local menudata, metadata = plugins.progress.make_svg(filename,other) + local htmldata = plugins.progress.makehtml(filename,other,menudata,metadata) lmx.htmfile = function(name) return name .. "-timing.xhtml" end lmx.lmxfile = function(name) return resolvers.find_file(name,'tex') end - lmx.set('title', format('ConTeXt Timing Information: %s',file.basename(name))) - lmx.set('color-background-one', lmx.get('color-background-green')) - lmx.set('color-background-two', lmx.get('color-background-blue')) - - goodies.progress.convert(filename) - - local menudata, metadata = goodies.progress.make_svg(filename,other) - local htmldata = goodies.progress.makehtml(filename,other,menudata,metadata) - - lmx.set('parametersmenu', concat(htmldata.parameters, "  ")) - lmx.set('nodesmenu', concat(htmldata.nodes, "  ")) - lmx.set('graphics', concat(htmldata.graphics, "\n\n")) + local variables = { + ['title-default'] = 'ConTeXt Timing Information', + ['title'] = format('ConTeXt Timing Information: %s',file.basename(name)), + ['parametersmenu'] = concat(htmldata.parameters, "  "), + ['nodesmenu'] = concat(htmldata.nodes, "  "), + ['graphics'] = concat(htmldata.graphics, "\n\n"), + ['color-background-one'] = lmx.get('color-background-green'), + ['color-background-two'] = lmx.get('color-background-blue'), + } if launch then - local htmfile = lmx.show(template) + local htmfile = lmx.show(template,variables) if remove then os.sleep(1) -- give time to launch os.remove(htmfile) end else - lmx.make(template) + lmx.make(template,variables) end - lmx.restore() end scripts = scripts or { } @@ -176,17 +168,17 @@ scripts.timings = scripts.timings or { } function scripts.timings.xhtml(filename) if filename == "" then logs.simple("provide filename") - elseif not goodies.progress.valid_file(filename) then + elseif not plugins.progress.valid_file(filename) then logs.simple("first run context again with the --timing option") else local basename = file.removesuffix(filename) local launch = environment.argument("launch") local remove = environment.argument("remove") - 
goodies.progress.make_lmx_page(basename,launch,remove) + plugins.progress.make_lmx_page(basename,launch,remove) end end -logs.extendbanner("ConTeXt Timing Tools 0.1",true) +logs.extendbanner("ConTeXt Timing Tools 0.10",true) messages.help = [[ --xhtml make xhtml file diff --git a/Master/texmf-dist/scripts/context/lua/mtx-tools.lua b/Master/texmf-dist/scripts/context/lua/mtx-tools.lua index 87fd51dc60d..bf4add16840 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-tools.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-tools.lua @@ -6,9 +6,7 @@ if not modules then modules = { } end modules ['mtx-tools'] = { license = "see context related readme files" } --- data tables by Thomas A. Schmitz - -local find, gsub = string.find, string.gsub +local find, format, sub, rep, gsub, lower = string.find, string.format, string.sub, string.rep, string.gsub, string.lower scripts = scripts or { } scripts.tools = scripts.tools or { } @@ -17,7 +15,9 @@ local bomb_1, bomb_2 = "^\254\255", "^\239\187\191" function scripts.tools.disarmutfbomb() local force, done = environment.argument("force"), false - for _, name in ipairs(environment.files) do + local files = environment.files + for i=1,#files do + local name = files[i] if lfs.isfile(name) then local data = io.loaddata(name) if not data then @@ -44,14 +44,133 @@ function scripts.tools.disarmutfbomb() end end -logs.extendbanner("All Kind Of Tools 1.0",true) +function scripts.tools.downcase() + local pattern = environment.argument('pattern') or "*" + local recurse = environment.argument('recurse') + local force = environment.argument('force') + local n = 0 + if recurse and not find(pattern,"^%*%*%/") then + pattern = "**/*" .. pattern + end + dir.glob(pattern,function(name) + local basename = file.basename(name) + if lower(basename) ~= basename then + n = n + 1 + if force then + os.rename(name,lower(name)) + end + end + end) + if n > 0 then + if force then + logs.simple("%s files renamed",n) + else + logs.simple("use --force to do a real rename (%s files involved)",n) + end + else + logs.simple("nothing to do") + end +end + + +function scripts.tools.dirtoxml() + + local join, removesuffix, extname, date = file.join, file.removesuffix, file.extname, os.date + + local xmlns = "http://www.pragma-ade.com/rlg/xmldir.rng" + local timestamp = "%Y-%m-%d %H:%M" + + local pattern = environment.argument('pattern') or ".*" + local url = environment.argument('url') or "no-url" + local root = environment.argument('root') or "." 
+ local outputfile = environment.argument('output') + + local recurse = environment.argument('recurse') + local stripname = environment.argument('stripname') + local longname = environment.argument('longname') + + local function flush(list,result,n,path) + n, result = n or 1, result or { } + local d = rep(" ",n) + for name, attr in table.sortedhash(list) do + local mode = attr.mode + if mode == "file" then + result[#result+1] = format("%s",d,(longname and path and join(path,name)) or name) + result[#result+1] = format("%s %s",d,removesuffix(name)) + result[#result+1] = format("%s %s",d,extname(name)) + result[#result+1] = format("%s %s",d,attr.size) + result[#result+1] = format("%s %s",d,sub(attr.permissions,7,9)) + result[#result+1] = format("%s %s",d,date(timestamp,attr.modification)) + result[#result+1] = format("%s",d) + elseif mode == "directory" then + result[#result+1] = format("%s",d,name) + flush(attr.list,result,n+1,(path and join(path,name)) or name) + result[#result+1] = format("%s",d) + end + end + end + + if not pattern or pattern == "" then + logs.report('provide --pattern=') + return + end + + if stripname then + pattern = file.dirname(pattern) + end + + local luapattern = string.topattern(pattern,true) + + lfs.chdir(root) + + local list = dir.collect_pattern(root,luapattern,recurse) + + if list[outputfile] then + list[outputfile] = nil + end + + local result = { "" } + result[#result+1] = format("",url,root,pattern,luapattern,xmlns,date(timestamp)) + flush(list,result) + result[#result+1] = "" + + result = table.concat(result,"\n") + + if not outputfile or outputfile == "" then + texio.write_nl(result) + else + io.savedata(outputfile,result) + end + +end + +logs.extendbanner("Some File Related Goodies 1.01",true) messages.help = [[ --disarmutfbomb remove utf bomb if present + --force remove indeed + +--dirtoxml glob directory into xml + --pattern glob pattern (default: *) + --url url attribute (no processing) + --root the root of the globbed path (default: .) 
+ --output output filename (console by default) + --recurse recurse into subdirecories + --stripname take pathpart of given pattern + --longname set name attributes to full path name + +--downcase + --pattern glob pattern (default: *) + --recurse recurse into subdirecories + --force downcase indeed ]] if environment.argument("disarmutfbomb") then scripts.tools.disarmutfbomb() +elseif environment.argument("dirtoxml") then + scripts.tools.dirtoxml() +elseif environment.argument("downcase") then + scripts.tools.downcase() else logs.help(messages.help) end diff --git a/Master/texmf-dist/scripts/context/lua/mtx-update.lua b/Master/texmf-dist/scripts/context/lua/mtx-update.lua index bc6ca402639..b56083d3809 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-update.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-update.lua @@ -86,10 +86,12 @@ scripts.update.engines = { ["xetex"] = { { "base/xetex/", "texmf" }, { "fonts/new/", "texmf" }, + { "bin/luatex//", "texmf-" }, -- tools { "bin/xetex//", "texmf-" }, }, ["pdftex"] = { { "fonts/old/", "texmf" }, + { "bin/luatex//", "texmf-" }, -- tools { "bin/pdftex//", "texmf-" }, }, ["all"] = { @@ -112,30 +114,34 @@ scripts.update.goodies = { } scripts.update.platforms = { - ["mswin"] = "mswin", - ["windows"] = "mswin", - ["win32"] = "mswin", - ["win"] = "mswin", - ["linux"] = "linux", - ["freebsd"] = "freebsd", - ["freebsd-amd64"] = "freebsd-amd64", - ["linux-32"] = "linux", - ["linux-64"] = "linux-64", - ["linux32"] = "linux", - ["linux64"] = "linux-64", - ["linux-ppc"] = "linux-ppc", - ["ppc"] = "linux-ppc", - ["osx"] = "osx-intel", - ["macosx"] = "osx-intel", - ["osx-intel"] = "osx-intel", - ["osx-ppc"] = "osx-ppc", - ["osx-powerpc"] = "osx-ppc", - ["osxintel"] = "osx-intel", - ["osxppc"] = "osx-ppc", - ["osxpowerpc"] = "osx-ppc", - ["solaris-intel"] = "solaris-intel", - ["solaris-sparc"] = "solaris-sparc", - ["solaris"] = "solaris-sparc", + ["mswin"] = "mswin", + ["windows"] = "mswin", + ["win32"] = "mswin", + ["win"] = "mswin", + ["linux"] = "linux", + ["freebsd"] = "freebsd", + ["freebsd-amd64"] = "freebsd-amd64", + ["kfreebsd"] = "kfreebsd-i386", + ["kfreebsd-i386"] = "kfreebsd-i386", + ["kfreebsd-amd64"] = "kfreebsd-amd64", + ["linux-32"] = "linux", + ["linux-64"] = "linux-64", + ["linux32"] = "linux", + ["linux64"] = "linux-64", + ["linux-ppc"] = "linux-ppc", + ["ppc"] = "linux-ppc", + ["osx"] = "osx-intel", + ["macosx"] = "osx-intel", + ["osx-intel"] = "osx-intel", + ["osx-ppc"] = "osx-ppc", + ["osx-powerpc"] = "osx-ppc", + ["osx-64"] = "osx-64", + ["osxintel"] = "osx-intel", + ["osxppc"] = "osx-ppc", + ["osxpowerpc"] = "osx-ppc", + ["solaris-intel"] = "solaris-intel", + ["solaris-sparc"] = "solaris-sparc", + ["solaris"] = "solaris-sparc", } -- the list is filled up later (when we know what modules to download) @@ -144,12 +150,14 @@ scripts.update.modules = { } function scripts.update.run(str) - logs.report("run", str) + -- important, otherwise formats fly to a weird place + -- (texlua sets luatex as the engine, we need to reset that or to fix texexec :) + os.setenv("engine",nil) if environment.argument("force") then - -- important, otherwise formats fly to a weird place - -- (texlua sets luatex as the engine, we need to reset that or to fix texexec :) - os.setenv("engine",nil) + logs.report("run", str) os.execute(str) + else + logs.report("dry run", str) end end @@ -165,16 +173,16 @@ function scripts.update.synchronize() logs.report("update","start") - local texroot = scripts.update.fullpath(states.get("paths.root")) - local engines = 
states.get('engines') or { } - local platforms = states.get('platforms') or { } - local repositories = states.get('repositories') -- minimals - local bin = states.get("rsync.program") -- rsync - local url = states.get("rsync.server") -- contextgarden.net - local version = states.get("context.version") -- current (or beta) - local extras = states.get("extras") -- extras (like modules) - local goodies = states.get("goodies") -- goodies (like editors) - local force = environment.argument("force") + local texroot = scripts.update.fullpath(states.get("paths.root")) + local engines = states.get('engines') or { } + local platforms = states.get('platforms') or { } + local repositories = states.get('repositories') -- minimals + local bin = states.get("rsync.program") -- rsync + local url = states.get("rsync.server") -- contextgarden.net + local version = states.get("context.version") -- current (or beta) + local extras = states.get("extras") -- extras (like modules) + local goodies = states.get("goodies") -- goodies (like editors) + local force = environment.argument("force") bin = string.gsub(bin,"\\","/") @@ -188,18 +196,19 @@ function scripts.update.synchronize() if force then dir.mkdirs(format("%s/%s", texroot, "texmf-cache")) dir.mkdirs(format("%s/%s", texroot, "texmf-local")) + dir.mkdirs(format("%s/%s", texroot, "texmf-project")) end if ok or not force then - local fetched, individual, osplatform = { }, { }, os.currentplatform() + local fetched, individual, osplatform = { }, { }, os.platform -- takes a collection as argument and returns a list of folders local function collection_to_list_of_folders(collection, platform) local archives = {} - for _, c in ipairs(collection) do - local archive = c[1] + for i=1,#collection do + local archive = collection[i][1] archive = archive:gsub("", platform) archive = archive:gsub("", version) archives[#archives+1] = archive @@ -220,7 +229,7 @@ function scripts.update.synchronize() return prefix .. 
concat(list_of_folders, format(" %s", prefix)) end - -- example of usage: print(list_of_folders_to_rsync_string(collection_to_list_of_folders(scripts.update.base, os.currentplatform))) + -- example of usage: print(list_of_folders_to_rsync_string(collection_to_list_of_folders(scripts.update.base, os.platform))) -- rename function and add some more functionality: -- * recursive/non-recursive (default: non-recursive) @@ -255,7 +264,8 @@ function scripts.update.synchronize() local available_modules = get_list_of_files_from_rsync({"modules/"}) -- hash of requested modules -- local h = table.tohash(extras:split(",")) - for _, s in ipairs(available_modules) do + for i=1,#available_modules do + local s = available_modules[i] -- if extras == "all" or h[s] then if extras.all or extras[s] then scripts.update.modules[#scripts.update.modules+1] = { format("modules/%s/",s), "texmf-context" } @@ -269,7 +279,8 @@ function scripts.update.synchronize() if collection and platform then platform = scripts.update.platforms[platform] if platform then - for _, c in ipairs(collection) do + for i=1,#collection do + local c = collection[i] local archive = c[1]:gsub("", platform) local destination = format("%s/%s", texroot, c[2]:gsub("", platform)) destination = destination:gsub("\\","/") @@ -283,30 +294,32 @@ function scripts.update.synchronize() end end - for platform, _ in pairs(platforms) do + for platform, _ in next, platforms do add_collection(scripts.update.base,platform) end - for platform, _ in pairs(platforms) do + for platform, _ in next, platforms do add_collection(scripts.update.modules,platform) end - for engine, _ in pairs(engines) do - for platform, _ in pairs(platforms) do + for engine, _ in next, engines do + for platform, _ in next, platforms do add_collection(scripts.update.engines[engine],platform) end end if goodies and type(goodies) == "table" then - for goodie, _ in pairs(goodies) do - for platform, _ in pairs(platforms) do + for goodie, _ in next, goodies do + for platform, _ in next, platforms do add_collection(scripts.update.goodies[goodie],platform) end end end local combined = { } - for _, repository in ipairs(scripts.update.repositories) do + local update_repositories = scripts.update.repositories + for i=1,#update_repositories do + local repository = update_repositories[i] if repositories[repository] then - for _, v in pairs(individual) do + for _, v in next, individual do local archive, destination = v[1], v[2] local cd = combined[destination] if not cd then @@ -318,14 +331,14 @@ function scripts.update.synchronize() end end if logs.verbose then - for k, v in pairs(combined) do + for k, v in next, combined do logs.report("update", k) - for k,v in ipairs(v) do - logs.report("update", " <= " .. v) + for i=1,#v do + logs.report("update", " <= " .. 
v[i]) end end end - for destination, archive in pairs(combined) do + for destination, archive in next, combined do local archives, command = concat(archive," "), "" -- local normalflags, deleteflags = states.get("rsync.flags.normal"), states.get("rsync.flags.delete") -- if environment.argument("keep") or destination:find("%.$") then @@ -334,13 +347,17 @@ function scripts.update.synchronize() -- command = format("%s %s %s %s'%s' '%s'", bin, normalflags, deleteflags, url, archives, destination) -- end local normalflags, deleteflags = states.get("rsync.flags.normal"), "" + local dryrunflags = "" + if not environment.argument("force") then + dryrunflags = "--dry-run" + end if (destination:find("texmf$") or destination:find("texmf%-context$")) and (not environment.argument("keep")) then deleteflags = states.get("rsync.flags.delete") end - command = format("%s %s %s %s'%s' '%s'", bin, normalflags, deleteflags, url, archives, destination) - logs.report("mtx update", format("running command: %s",command)) + command = format("%s %s %s %s %s'%s' '%s'", bin, normalflags, deleteflags, dryrunflags, url, archives, destination) + --logs.report("mtx update", format("running command: %s",command)) if not fetched[command] then - scripts.update.run(command) + scripts.update.run(command,true) fetched[command] = command end end @@ -363,7 +380,7 @@ function scripts.update.synchronize() end end - for platform, _ in pairs(platforms) do + for platform, _ in next, platforms do update_script('luatools',platform) update_script('mtxrun',platform) end @@ -374,12 +391,20 @@ function scripts.update.synchronize() if not force then logs.report("update", "use --force to really update files") end + + resolvers.load_tree(texroot) -- else we operate in the wrong tree + + -- update filename database for pdftex/xetex + scripts.update.run("mktexlsr") + -- update filename database for luatex + scripts.update.run("luatools --generate") + logs.report("update","done") end function table.fromhash(t) local h = { } - for k, v in pairs(t) do -- no ipairs here + for k, v in next, t do -- not indexed if v then h[#h+1] = k end end return h @@ -390,34 +415,34 @@ function scripts.update.make() logs.report("make","start") - local force = environment.argument("force") - local texroot = scripts.update.fullpath(states.get("paths.root")) - local engines = states.get('engines') - local goodies = states.get('goodies') + local force = environment.argument("force") + local texroot = scripts.update.fullpath(states.get("paths.root")) + local engines = states.get('engines') + local goodies = states.get('goodies') local platforms = states.get('platforms') - local formats = states.get('formats') + local formats = states.get('formats') resolvers.load_tree(texroot) - -- update filename database for pdftex/xetex + scripts.update.run("mktexlsr") - -- update filename database for luatex scripts.update.run("luatools --generate") + local askedformats = formats local texformats = table.tohash(scripts.update.texformats) local mpformats = table.tohash(scripts.update.mpformats) - for k,v in pairs(texformats) do + for k,v in next, texformats do if not askedformats[k] then texformats[k] = nil end end - for k,v in pairs(mpformats) do + for k,v in next, mpformats do if not askedformats[k] then mpformats[k] = nil end end local formatlist = concat(table.fromhash(texformats), " ") if formatlist ~= "" then - for engine in pairs(engines) do + for engine in next, engines do if engine == "luatex" then scripts.update.run(format("context --make")) -- maybe also formatlist else @@ 
-438,7 +463,7 @@ function scripts.update.make() logs.report("make","done") end -logs.extendbanner("Download Tools 0.20",true) +logs.extendbanner("ConTeXt Minimals Updater 0.21",true) messages.help = [[ --platform=string platform (windows, linux, linux-64, osx-intel, osx-ppc, linux-ppc) @@ -476,7 +501,7 @@ if scripts.savestate then states.set("rsync.program", environment.argument("rsync"), "rsync", true) -- ok states.set("rsync.server", environment.argument("server"), "contextgarden.net::", true) -- ok states.set("rsync.module", environment.argument("module"), "minimals", true) -- ok - states.set("rsync.flags.normal", environment.argument("flags"), "-rpztlv --stats", true) -- ok + states.set("rsync.flags.normal", environment.argument("flags"), "-rpztlv", true) -- ok states.set("rsync.flags.delete", nil, "--delete", true) -- ok states.set("paths.root", environment.argument("texroot"), "tex", true) -- ok @@ -490,7 +515,7 @@ if scripts.savestate then local valid = scripts.update.engines for r in gmatch(environment.argument("engine") or "all","([^, ]+)") do if r == "all" then - for k, v in pairs(valid) do + for k, v in next, valid do if k ~= "all" then states.set("engines." .. k, true) end @@ -500,7 +525,7 @@ if scripts.savestate then end end local valid = scripts.update.platforms - for r in gmatch(environment.argument("platform") or os.currentplatform(),"([^, ]+)") do + for r in gmatch(environment.argument("platform") or os.platform,"([^, ]+)") do if valid[r] then states.set("platforms." .. r, true) end end diff --git a/Master/texmf-dist/scripts/context/lua/mtx-watch.lua b/Master/texmf-dist/scripts/context/lua/mtx-watch.lua index 2e4dcf6efec..10f01cf8698 100644 --- a/Master/texmf-dist/scripts/context/lua/mtx-watch.lua +++ b/Master/texmf-dist/scripts/context/lua/mtx-watch.lua @@ -9,160 +9,247 @@ if not modules then modules = { } end modules ['mtx-watch'] = { scripts = scripts or { } scripts.watch = scripts.watch or { } -do +local format, concat, difftime, time = string.format, table.concat, os.difftime, os.time +local next, type = next, type - function scripts.watch.save_exa_modes(joblog,ctmname) +-- the machine/instance matches the server app we use + +local machine = socket.dns.gethostname() or "unknown-machine" +local instance = string.match(machine,"(%d+)$") or "0" + +function scripts.watch.save_exa_modes(joblog,ctmname) + local values = joblog and joblog.values + if values then local t= { } - if joblog then - t[#t+1] = "\n" - t[#t+1] = "" - if joblog.values then - for k, v in pairs(joblog.values) do - t[#t+1] = string.format("\t%s", k, tostring(v)) + t[#t+1] = "\n" + t[#t+1] = "" + for k, v in next, joblog.values do + t[#t+1] = format("\t%s", k, tostring(v)) + end + t[#t+1] = "" + io.savedata(ctmname,concat(t,"\n")) + else + os.remove(ctmname) + end +end + +local function toset(t) + if type(t) == "table" then + return concat(t,",") + else + return t + end +end + +local function noset(t) + if type(t) == "table" then + return t[1] + else + return t + end +end + +local lfsdir, lfsattributes = lfs.dir, lfs.attributes + +local function glob(files,path) + for name in lfsdir(path) do + if name:find("^%.") then + -- skip . and .. + else + name = path .. "/" .. 
name + local a = lfsattributes(name) + if not a then + -- weird + elseif a.mode == "directory" then + if name:find("graphics$") or name:find("figures$") or name:find("resources$") then + -- skip these too + else + glob(files,name) end - else - t[#t+1] = "" + elseif name:find(".%luj$") then + files[name] = a.change or a.ctime or a.modification or a.mtime end - t[#t+1] = "" end - os.remove(ctmname) - io.savedata(ctmname,table.concat(t,"\n")) end +end - function scripts.watch.watch() - local delay = environment.argument("delay") or 5 - local logpath = environment.argument("logpath") or "" - local pipe = environment.argument("pipe") or false - if #environment.files > 0 then - for _, path in ipairs(environment.files) do - logs.report("watch", "watching path ".. path) +local clock = os.gettimeofday or os.time -- we cannot trust os.clock on linux + +function scripts.watch.watch() + local delay = tonumber(environment.argument("delay") or 5) or 5 + if delay == 0 then + delay = .25 + end + local logpath = environment.argument("logpath") or "" + local pipe = environment.argument("pipe") or false + local watcher = "mtxwatch.run" + local paths = environment.files + if #paths > 0 then + if environment.argument("automachine") then + logpath = string.gsub(logpath,"/machine/","/"..machine.."/") + for i=1,#paths do + paths[i] = string.gsub(paths[i],"/machine/","/"..machine.."/") end - local function glob(files,path) - for name in lfs.dir(path) do - if name:find("^%.") then - -- skip . and .. - else - name = path .. "/" .. name - local a = lfs.attributes(name) - if not a then - -- weird - elseif a.mode == "directory" then - if name:find("graphics$") or name:find("figures$") or name:find("resources$") then - -- skip these too + end + for i=1,#paths do + logs.report("watch", "watching path ".. paths[i]) + end + local function process() + local done = false + for i=1,#paths do + local path = paths[i] + lfs.chdir(path) + local files = { } + glob(files,path) + table.sort(files) -- what gets sorted here, todo: by time + for name, time in next, files do + --~ local ok, joblog = xpcall(function() return dofile(name) end, function() end ) + local ok, joblog = pcall(dofile,name) + if ok and joblog then + if joblog.status == "processing" then + logs.report("watch",format("aborted job, %s added to queue",name)) + joblog.status = "queued" + io.savedata(name, table.serialize(joblog,true)) + elseif joblog.status == "queued" then + local command = joblog.command + if command then + local replacements = { + inputpath = toset((joblog.paths and joblog.paths.input ) or "."), + outputpath = noset((joblog.paths and joblog.paths.output) or "."), + filename = joblog.filename or "", + } + -- todo: revision path etc + command = command:gsub("%%(.-)%%", replacements) + if command ~= "" then + joblog.status = "processing" + joblog.runtime = clock() + io.savedata(name, table.serialize(joblog,true)) + logs.report("watch",format("running: %s", command)) + local newpath = file.dirname(name) + io.flush() + local result = "" + local ctmname = file.basename(replacements.filename) + if ctmname == "" then ctmname = name end -- use self as fallback + ctmname = file.replacesuffix(ctmname,"ctm") + if newpath ~= "" and newpath ~= "." 
then + local oldpath = lfs.currentdir() + lfs.chdir(newpath) + scripts.watch.save_exa_modes(joblog,ctmname) + if pipe then result = os.resultof(command) else result = os.spawn(command) end + lfs.chdir(oldpath) + else + scripts.watch.save_exa_modes(joblog,ctmname) + if pipe then result = os.resultof(command) else result = os.spawn(command) end + end + logs.report("watch",format("return value: %s", result)) + done = true + local path, base = replacements.outputpath, file.basename(replacements.filename) + joblog.runtime = clock() - joblog.runtime + if base ~= "" then + joblog.result = file.replacesuffix(file.join(path,base),"pdf") + joblog.size = lfs.attributes(joblog.result,"size") + end + joblog.status = "finished" + else + joblog.status = "invalid command" + end else - glob(files,name) + joblog.status = "no command" + end + -- pcall, when error sleep + again + -- todo: just one log file and append + io.savedata(name, table.serialize(joblog,true)) + if logpath and logpath ~= "" then + local name = file.join(logpath,os.uuid() .. ".lua") + io.savedata(name, table.serialize(joblog,true)) + logs.report("watch", "saving joblog in " .. name) end - elseif name:find(".%luj$") then - files[name] = a.change or a.ctime or a.modification or a.mtime end end end end - local function toset(t) - if type(t) == "table" then - return table.concat(t,",") - else - return t + end + local n, start = 0, time() +--~ local function wait() +--~ io.flush() +--~ if not done then +--~ n = n + 1 +--~ if n >= 10 then +--~ logs.report("watch", format("run time: %i seconds, memory usage: %0.3g MB", difftime(time(),start), (status.luastate_bytes/1024)/1000)) +--~ n = 0 +--~ end +--~ os.sleep(delay) +--~ end +--~ end + local wtime = 0 + local function wait() + io.flush() + if not done then + n = n + 1 + if n >= 10 then + logs.report("watch", format("run time: %i seconds, memory usage: %0.3g MB", difftime(time(),start), (status.luastate_bytes/1024)/1000)) + n = 0 end - end - local function noset(t) - if type(t) == "table" then - return t[1] - else - return t + local ttime = 0 + while ttime <= delay do + local wt = lfs.attributes(watcher,"mtime") + if wt and wt ~= wtime then + -- fast signal that there is a request + wtime = wt + break + end + ttime = ttime + 0.2 + os.sleep(0.2) end end - local function process() - local done = false - for _, path in ipairs(environment.files) do - lfs.chdir(path) - local files = { } - glob(files,path) - table.sort(files) -- what gets sorted here - for name, time in pairs(files) do - --~ local ok, joblog = xpcall(function() return dofile(name) end, function() end ) - local ok, joblog = pcall(dofile,name) - if ok and joblog then - if joblog.status == "processing" then - logs.report("watch",string.format("aborted job, %s added to queue",name)) - joblog.status = "queued" - io.savedata(name, table.serialize(joblog,true)) - elseif joblog.status == "queued" then - local command = joblog.command - if command then - local replacements = { - inputpath = toset((joblog.paths and joblog.paths.input ) or "."), - outputpath = noset((joblog.paths and joblog.paths.output) or "."), - filename = joblog.filename or "", - } - command = command:gsub("%%(.-)%%", replacements) - if command ~= "" then - joblog.status = "processing" - joblog.runtime = os.time() -- os.clock() - io.savedata(name, table.serialize(joblog,true)) - logs.report("watch",string.format("running: %s", command)) - local newpath = file.dirname(name) - io.flush() - local result = "" - local ctmname = file.basename(replacements.filename) - if ctmname 
== "" then ctmname = name end -- use self as fallback - ctmname = file.replacesuffix(ctmname,"ctm") - if newpath ~= "" and newpath ~= "." then - local oldpath = lfs.currentdir() - lfs.chdir(newpath) - scripts.watch.save_exa_modes(joblog,ctmname) - if pipe then result = os.resultof(command) else result = os.spawn(command) end - lfs.chdir(oldpath) - else - scripts.watch.save_exa_modes(joblog,ctmname) - if pipe then result = os.resultof(command) else result = os.spawn(command) end - end - logs.report("watch",string.format("return value: %s", result)) - done = true - local path, base = replacements.outputpath, file.basename(replacements.filename) - joblog.runtime = os.time() - joblog.runtime -- os.clock() - joblog.runtime - joblog.result = file.replacesuffix(file.join(path,base),"pdf") - joblog.size = lfs.attributes(joblog.result,"size") - joblog.status = "finished" - else - joblog.status = "invalid command" - end - else - joblog.status = "no command" - end - -- pcall, when error sleep + again - io.savedata(name, table.serialize(joblog,true)) - if logpath ~= "" then - local name = string.format("%s/%s%04i%09i.lua", logpath, os.time(), math.floor((os.clock()*100)%1000), math.random(99999999)) - io.savedata(name, table.serialize(joblog,true)) - logs.report("watch", "saving joblog ".. name) + end + local cleanupdelay, cleanup = environment.argument("cleanup"), false + if cleanupdelay then + local lasttime = time() + cleanup = function() + local currenttime = time() + local delta = difftime(currenttime,lasttime) + if delta > cleanupdelay then + lasttime = currenttime + for i=1,#paths do + local path = paths[i] + if string.find(path,"%.") then + -- safeguard, we want a fully qualified path + else + local files = dir.glob(file.join(path,"*")) + for i=1,#files do + local name = files[i] + local filetime = lfs.attributes(name,"modification") + local delta = difftime(currenttime,filetime) + if delta > cleanupdelay then + -- logs.report("watch",format("cleaning up '%s'",name)) + os.remove(name) end end end end end end - local n, start = 0, os.clock() - local function wait() - io.flush() - if not done then - n = n + 1 - if n >= 10 then - logs.report("watch", string.format("run time: %i seconds, memory usage: %0.3g MB", os.clock() - start, (status.luastate_bytes/1024)/1000)) - n = 0 - end - os.sleep(delay) - end + else + cleanup = function() + -- nothing end - while true do + end + while true do + if false then +--~ if true then + process() + cleanup() + wait() + else pcall(process) + pcall(cleanup) pcall(wait) end - else - logs.report("watch", "no paths to watch") end + else + logs.report("watch", "no paths to watch") end - end function scripts.watch.collect_logs(path) -- clean 'm up too @@ -171,10 +258,11 @@ function scripts.watch.collect_logs(path) -- clean 'm up too local files = dir.globfiles(path,false,"^%d+%.lua$") local collection = { } local valid = table.tohash({"filename","result","runtime","size","status"}) - for _, name in ipairs(files) do + for i=1,#files do + local name = files[i] local t = dofile(name) if t and type(t) == "table" and t.status then - for k, v in pairs(t) do + for k, v in next, t do if not valid[k] then t[k] = nil end @@ -186,21 +274,21 @@ function scripts.watch.collect_logs(path) -- clean 'm up too end function scripts.watch.save_logs(collection,path) -- play safe - if collection and not table.is_empty(collection) then + if collection and next(collection) then path = path or environment.argument("logpath") or "" path = (path == "" and ".") or path - local filename = 
string.format("%s/collected-%s.lua",path,tostring(os.time())) + local filename = format("%s/collected-%s.lua",path,tostring(time())) io.savedata(filename,table.serialize(collection,true)) local check = dofile(filename) - for k,v in pairs(check) do + for k,v in next, check do if not collection[k] then logs.error("watch", "error in saving file") os.remove(filename) return false end end - for k,v in pairs(check) do - os.remove(string.format("%s.lua",k)) + for k,v in next, check do + os.remove(format("%s.lua",k)) end return true else @@ -213,10 +301,11 @@ function scripts.watch.collect_collections(path) -- removes duplicates path = (path == "" and ".") or path local files = dir.globfiles(path,false,"^collected%-%d+%.lua$") local collection = { } - for _, name in ipairs(files) do + for i=1,#files do + local name = files[i] local t = dofile(name) if t and type(t) == "table" then - for k, v in pairs(t) do + for k, v in next, t do collection[k] = v end end @@ -227,26 +316,56 @@ end function scripts.watch.show_logs(path) -- removes duplicates local collection = scripts.watch.collect_collections(path) or { } local max = 0 - for k,v in pairs(collection) do + for k,v in next, collection do v = v.filename or "?" if #v > max then max = #v end end - print(max) - for k,v in ipairs(table.sortedkeys(collection)) do + -- print(max) + local sorted = table.sortedkeys(collection) + for k=1,#sorted do + local v = sorted[k] local c = collection[v] local f, s, r, n = c.filename or "?", c.status or "?", c.runtime or 0, c.size or 0 - logs.report("watch", string.format("%s %s %3i %8i %s",string.padd(f,max," "),string.padd(s,10," "),r,n,v)) + logs.report("watch", format("%s %s %3i %8i %s",string.padd(f,max," "),string.padd(s,10," "),r,n,v)) + end +end + +function scripts.watch.cleanup_stale_files() -- removes duplicates + local path = environment.files[1] + local delay = tonumber(environment.argument("cleanup")) + local force = environment.argument("force") + if not path or path == "." 
then + logs.report("watch","provide qualified path") + elseif not delay then + logs.report("watch","missing --cleanup=delay") + else + logs.report("watch","dryrun, use --force for real cleanup") + local files = dir.glob(file.join(path,"*")) + local rtime = time() + for i=1,#files do + local name = files[i] + local mtime = lfs.attributes(name,"modification") + local delta = difftime(rtime,mtime) + if delta > delay then + logs.report("watch",format("cleaning up '%s'",name)) + if force then + os.remove(name) + end + end + end end end -logs.extendbanner("Watchdog 1.00",true) +logs.extendbanner("ConTeXt Request Watchdog 1.00",true) messages.help = [[ --logpath optional path for log files ---watch watch given path +--watch watch given path [--delay] --pipe use pipe instead of execute --delay delay between sweeps +--automachine replace /machine/ in path // --collect condense log files +--cleanup=delay remove files in given path [--force] --showlog show log data ]] @@ -254,6 +373,8 @@ if environment.argument("watch") then scripts.watch.watch() elseif environment.argument("collect") then scripts.watch.save_logs(scripts.watch.collect_logs()) +elseif environment.argument("cleanup") then + scripts.watch.save_logs(scripts.watch.cleanup_stale_files()) elseif environment.argument("showlog") then scripts.watch.show_logs() else diff --git a/Master/texmf-dist/scripts/context/lua/mtxrun.lua b/Master/texmf-dist/scripts/context/lua/mtxrun.lua index 82d1edecbc5..b99327692d7 100755 --- a/Master/texmf-dist/scripts/context/lua/mtxrun.lua +++ b/Master/texmf-dist/scripts/context/lua/mtxrun.lua @@ -48,13 +48,16 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-string'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep +local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch = lpeg.match + +-- some functions may disappear as they are not used anywhere if not string.split then @@ -94,8 +97,16 @@ function string:unquote() return (gsub(self,"^([\"\'])(.*)%1$","%2")) end +--~ function string:unquote() +--~ if find(self,"^[\'\"]") then +--~ return sub(self,2,-2) +--~ else +--~ return self +--~ end +--~ end + function string:quote() -- we could use format("%q") - return '"' .. self:unquote() .. 
'"' + return format("%q",self) end function string:count(pattern) -- variant 3 @@ -115,12 +126,23 @@ function string:limit(n,sentinel) end end -function string:strip() - return (gsub(self,"^%s*(.-)%s*$", "%1")) +--~ function string:strip() -- the .- is quite efficient +--~ -- return match(self,"^%s*(.-)%s*$") or "" +--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list +--~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)') +--~ end + +do -- roberto's variant: + local space = lpeg.S(" \t\v\n") + local nospace = 1 - space + local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0) + function string.strip(str) + return lpegmatch(stripper,str) or "" + end end function string:is_empty() - return not find(find,"%S") + return not find(self,"%S") end function string:enhance(pattern,action) @@ -154,14 +176,14 @@ if not string.characters then local function nextchar(str, index) index = index + 1 - return (index <= #str) and index or nil, str:sub(index,index) + return (index <= #str) and index or nil, sub(str,index,index) end function string:characters() return nextchar, self, 0 end local function nextbyte(str, index) index = index + 1 - return (index <= #str) and index or nil, byte(str:sub(index,index)) + return (index <= #str) and index or nil, byte(sub(str,index,index)) end function string:bytes() return nextbyte, self, 0 @@ -174,7 +196,7 @@ end function string:rpadd(n,chr) local m = n-#self if m > 0 then - return self .. self.rep(chr or " ",m) + return self .. rep(chr or " ",m) else return self end @@ -183,7 +205,7 @@ end function string:lpadd(n,chr) local m = n-#self if m > 0 then - return self.rep(chr or " ",m) .. self + return rep(chr or " ",m) .. self else return self end @@ -231,6 +253,17 @@ function string:pattesc() return (gsub(self,".",patterns_escapes)) end +local simple_escapes = { + ["-"] = "%-", + ["."] = "%.", + ["?"] = ".", + ["*"] = ".*", +} + +function string:simpleesc() + return (gsub(self,".",simple_escapes)) +end + function string:tohash() local t = { } for s in gmatch(self,"([^, ]+)") do -- lpeg @@ -242,10 +275,10 @@ end local pattern = lpeg.Ct(lpeg.C(1)^0) function string:totable() - return pattern:match(self) + return lpegmatch(pattern,self) end ---~ for _, str in ipairs { +--~ local t = { --~ "1234567123456712345671234567", --~ "a\tb\tc", --~ "aa\tbb\tcc", @@ -253,7 +286,10 @@ end --~ "aaaa\tbbbb\tcccc", --~ "aaaaa\tbbbbb\tccccc", --~ "aaaaaa\tbbbbbb\tcccccc", ---~ } do print(string.tabtospace(str)) end +--~ } +--~ for k,v do +--~ print(string.tabtospace(t[k])) +--~ end function string.tabtospace(str,tab) -- we don't handle embedded newlines @@ -261,7 +297,7 @@ function string.tabtospace(str,tab) local s = find(str,"\t") if s then if not tab then tab = 7 end -- only when found - local d = tab-(s-1)%tab + local d = tab-(s-1) % tab if d > 0 then str = gsub(str,"\t",rep(" ",d),1) else @@ -280,6 +316,25 @@ function string:compactlong() -- strips newlines and leading spaces return self end +function string:striplong() -- strips newlines and leading spaces + self = gsub(self,"^%s*","") + self = gsub(self,"[\n\r]+ *","\n") + return self +end + +function string:topattern(lowercase,strict) + if lowercase then + self = lower(self) + end + self = gsub(self,".",simple_escapes) + if self == "" then + self = ".*" + elseif strict then + self = "^" .. self .. 
"$" + end + return self +end + end -- of closure @@ -287,58 +342,64 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-lpeg'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc - ---~ l-lpeg.lua : - ---~ lpeg.digit = lpeg.R('09')^1 ---~ lpeg.sign = lpeg.S('+-')^1 ---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1) ---~ lpeg.number = lpeg.float + lpeg.integer ---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1 ---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1 ---~ lpeg.uppercase = lpeg.P("AZ") ---~ lpeg.lowercase = lpeg.P("az") - ---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed ---~ lpeg.space = lpeg.S(' ')^1 ---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1 ---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1 ---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1 - -local hash = { } +local lpeg = require("lpeg") + +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V +local match = lpeg.match + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local utf8byte = R("\128\191") + +patterns.utf8byte = utf8byte +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8byte +patterns.utf8three = R("\224\239") * utf8byte * utf8byte +patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte + +patterns.digit = digit +patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.float = sign^0 * digit^0 * P('.') * digit^1 +patterns.number = patterns.float + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = S(" ") +patterns.eol = S("\n\r") +patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) +patterns.newline = crlf + cr + lf +patterns.nonspace = 1 - patterns.space +patterns.nonspacer = 1 - patterns.spacer +patterns.whitespace = patterns.eol + patterns.spacer +patterns.nonwhitespace = 1 - patterns.whitespace +patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * lpeg.V(1) } -end - -function lpeg.startswith(pattern) --slightly adapted - return P(pattern) + return P { P(pattern) + 1 * V(1) } -- why so complex? 
end function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end --- variant: - ---~ local parser = lpeg.Ct(lpeg.splitat(newline)) - -local crlf = P("\r\n") -local cr = P("\r") -local lf = P("\n") -local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -local newline = crlf + cr + lf -local spacing = space^0 * newline - +local spacing = patterns.spacer^0 * patterns.newline -- sort of strip local empty = spacing * Cc("") local nonempty = Cs((1-spacing)^1) * spacing^-1 local content = (empty + nonempty)^1 @@ -346,15 +407,15 @@ local content = (empty + nonempty)^1 local capture = Ct(content^0) function string:splitlines() - return capture:match(self) + return match(capture,self) end -lpeg.linebyline = content -- better make a sublibrary +patterns.textline = content ---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more ---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more ---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps ---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps local splitters_s, splitters_m = { }, { } @@ -364,7 +425,7 @@ local function splitat(separator,single) separator = P(separator) if single then local other, any = C((1 - separator)^0), P(1) - splitter = other * (separator * C(any^0) + "") + splitter = other * (separator * C(any^0) + "") -- ? splitters_s[separator] = splitter else local other = C((1 - separator)^0) @@ -379,15 +440,72 @@ lpeg.splitat = splitat local cache = { } +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = Ct(splitat(separator)) + cache[separator] = c + end + return match(c,str) +end + function string:split(separator) local c = cache[separator] if not c then c = Ct(splitat(separator)) cache[separator] = c end - return c:match(self) + return match(c,self) +end + +lpeg.splitters = cache + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,str) +end + +function string:checkedsplit(separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,self) end +--~ function lpeg.append(list,pp) +--~ local p = pp +--~ for l=1,#list do +--~ if p then +--~ p = p + P(list[l]) +--~ else +--~ p = P(list[l]) +--~ end +--~ end +--~ return p +--~ end + +--~ from roberto's site: + +local f1 = string.byte + +local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + end -- of closure @@ -395,7 +513,7 @@ do -- create closure to overcome 200 
locals limit if not modules then modules = { } end modules ['l-table'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -404,9 +522,58 @@ if not modules then modules = { } end modules ['l-table'] = { table.join = table.concat local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump +local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match local getmetatable, setmetatable = getmetatable, setmetatable -local type, next, tostring, ipairs = type, next, tostring, ipairs +local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs + +-- Starting with version 5.2 Lua no longer provide ipairs, which makes +-- sense. As we already used the for loop and # in most places the +-- impact on ConTeXt was not that large; the remaining ipairs already +-- have been replaced. In a similar fashio we also hardly used pairs. +-- +-- Just in case, we provide the fallbacks as discussed in Programming +-- in Lua (http://www.lua.org/pil/7.3.html): + +if not ipairs then + + -- for k, v in ipairs(t) do ... end + -- for k=1,#t do local v = t[k] ... end + + local function iterate(a,i) + i = i + 1 + local v = a[i] + if v ~= nil then + return i, v --, nil + end + end + + function ipairs(a) + return iterate, a, 0 + end + +end + +if not pairs then + + -- for k, v in pairs(t) do ... end + -- for k, v in next, t do ... end + + function pairs(t) + return next, t -- , nil + end + +end + +-- Also, unpack has been moved to the table table, and for compatiility +-- reasons we provide both now. 
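-- A minimal sketch, assuming stock Lua, of how the fallbacks described above behave;
-- the table 'demo' and its values are invented for the illustration. The generic for
-- over (possibly emulated) ipairs and the plain numeric loop that this patch switches
-- to elsewhere visit the same elements.
local demo = { "aa", "bb", "cc" }
for i, v in ipairs(demo) do
    print(i, v)        -- 1 aa, 2 bb, 3 cc (via the iterate closure when emulated)
end
for i = 1, #demo do
    print(i, demo[i])  -- same traversal, without an iterator call per step
end
for k, v in pairs(demo) do
    print(k, v)        -- with the fallback this is simply 'for k, v in next, demo do'
end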
+ +if not table.unpack then + table.unpack = _G.unpack +elseif not unpack then + _G.unpack = table.unpack +end + +-- extra functions, some might go (when not used) function table.strip(tab) local lst = { } @@ -421,6 +588,14 @@ function table.strip(tab) return lst end +function table.keys(t) + local k = { } + for key, _ in next, t do + k[#k+1] = key + end + return k +end + local function compare(a,b) return (tostring(a) < tostring(b)) end @@ -464,7 +639,7 @@ end table.sortedkeys = sortedkeys table.sortedhashkeys = sortedhashkeys -function table.sortedpairs(t) +function table.sortedhash(t) local s = sortedhashkeys(t) -- maybe just sortedkeys local n = 0 local function kv(s) @@ -475,6 +650,8 @@ function table.sortedpairs(t) return kv, s end +table.sortedpairs = table.sortedhash + function table.append(t, list) for _,v in next, list do insert(t,v) @@ -583,7 +760,7 @@ end table.fastcopy = fastcopy table.copy = copy --- rougly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) +-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) function table.sub(t,i,j) return { unpack(t,i,j) } @@ -597,18 +774,18 @@ end -- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) -function table.is_empty(t) +function table.is_empty(t) -- obolete, use inline code instead return not t or not next(t) end -function table.one_entry(t) +function table.one_entry(t) -- obolete, use inline code instead local n = next(t) return n and not next(t,n) end -function table.starts_at(t) - return ipairs(t,1)(t,0) -end +--~ function table.starts_at(t) -- obsolete, not nice anyway +--~ return ipairs(t,1)(t,0) +--~ end function table.tohash(t,value) local h = { } @@ -686,6 +863,8 @@ end -- -- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) +-- problem: there no good number_to_string converter with the best resolution + local function do_serialize(root,name,depth,level,indexed) if level > 0 then depth = depth .. " " @@ -708,8 +887,9 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s{",depth)) end end + -- we could check for k (index) being number (cardinal) if root and next(root) then - local first, last = nil, 0 -- #root cannot be trusted here + local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) if compact then -- NOT: for k=1,#root do (we need to quit at nil) for k,v in ipairs(root) do -- can we use next? 
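-- A minimal usage sketch of the table.sortedhash / table.sortedpairs iterator defined
-- above, assuming the l-table helpers are loaded; the 'settings' table and its keys are
-- invented for the illustration. Sorting the keys first gives a reproducible traversal
-- order, which is what the serializer below relies on for stable output.
local settings = { gamma = 3, alpha = 1, beta = 2 }
for k, v in table.sortedhash(settings) do
    print(k, v)   -- alpha 1, beta 2, gamma 3: always in sorted key order
end
-- a plain 'for k, v in next, settings do' yields the same pairs, but in an
-- unspecified order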
@@ -730,10 +910,10 @@ local function do_serialize(root,name,depth,level,indexed) if hexify then handle(format("%s 0x%04X,",depth,v)) else - handle(format("%s %s,",depth,v)) + handle(format("%s %s,",depth,v)) -- %.99g end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) @@ -770,29 +950,29 @@ local function do_serialize(root,name,depth,level,indexed) --~ if hexify then --~ handle(format("%s %s=0x%04X,",depth,key(k),v)) --~ else - --~ handle(format("%s %s=%s,",depth,key(k),v)) + --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g --~ end if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) else - handle(format("%s [%s]=%s,",depth,k,v)) + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g end elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then if hexify then handle(format("%s %s=0x%04X,",depth,k,v)) else - handle(format("%s %s=%s,",depth,k,v)) + handle(format("%s %s=%s,",depth,k,v)) -- %.99g end else if hexify then handle(format("%s [%q]=0x%04X,",depth,k,v)) else - handle(format("%s [%q]=%s,",depth,k,v)) + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then --~ handle(format("%s %s=%s,",depth,key(k),v)) if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then @@ -1001,7 +1181,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify) end end -local function flatten(t,f,complete) +local function flatten(t,f,complete) -- is this used? meybe a variant with next, ... for i=1,#t do local v = t[i] if type(v) == "table" then @@ -1030,6 +1210,24 @@ end table.flatten_one_level = table.unnest +-- a better one: + +local function flattened(t,f) + if not f then + f = { } + end + for k, v in next, t do + if type(v) == "table" then + flattened(v,f) + else + f[k] = v + end + end + return f +end + +table.flattened = flattened + -- the next three may disappear function table.remove_value(t,value) -- todo: n @@ -1165,7 +1363,7 @@ function table.clone(t,p) -- t is optional or nil or table elseif not t then t = { } end - setmetatable(t, { __index = function(_,key) return p[key] end }) + setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ? 
return t end @@ -1193,21 +1391,36 @@ function table.reverse(t) return tt end ---~ function table.keys(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return k ---~ end +function table.insert_before_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end + +function table.insert_after_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end ---~ function table.keys_as_string(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return concat(k,"") ---~ end end -- of closure @@ -1216,13 +1429,13 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-io'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local byte = string.byte +local byte, find, gsub = string.byte, string.find, string.gsub if string.find(os.getenv("PATH"),";") then io.fileseparator, io.pathseparator = "\\", ";" @@ -1251,7 +1464,7 @@ function io.savedata(filename,data,joiner) elseif type(data) == "function" then data(f) else - f:write(data) + f:write(data or "") end f:close() return true @@ -1380,20 +1593,21 @@ function io.ask(question,default,options) end io.write(string.format(" ")) local answer = io.read() - answer = answer:gsub("^%s*(.*)%s*$","%1") + answer = gsub(answer,"^%s*(.*)%s*$","%1") if answer == "" and default then return default elseif not options then return answer else - for _,v in pairs(options) do - if v == answer then + for k=1,#options do + if options[k] == answer then return answer end end local pattern = "^" .. 
answer - for _,v in pairs(options) do - if v:find(pattern) then + for k=1,#options do + local v = options[k] + if find(v,pattern) then return v end end @@ -1408,20 +1622,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-number'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format = string.format +local tostring = tostring +local format, floor, insert, match = string.format, math.floor, table.insert, string.match +local lpegmatch = lpeg.match number = number or { } -- a,b,c,d,e,f = number.toset(100101) function number.toset(n) - return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") + return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") end function number.toevenhex(n) @@ -1447,10 +1663,21 @@ end local one = lpeg.C(1-lpeg.S(''))^1 function number.toset(n) - return one:match(tostring(n)) + return lpegmatch(one,tostring(n)) end - +function number.bits(n,zero) + local t, i = { }, (zero and 0) or 1 + while n > 0 do + local m = n % 2 + if m > 0 then + insert(t,1,i) + end + n = floor(n/2) + i = i + 1 + end + return t +end end -- of closure @@ -1459,7 +1686,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-set'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1549,46 +1776,63 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-os'] = { version = 1.001, - comment = "companion to luat-lub.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local find = string.find +-- maybe build io.flush in os.execute + +local find, format, gsub = string.find, string.format, string.gsub +local random, ceil = math.random, math.ceil + +local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end function os.resultof(command) - return io.popen(command,"r"):read("*all") + ioflush() -- else messed up logging + local handle = io.popen(command,"r") + if not handle then + -- print("unknown command '".. command .. "' in os.resultof") + return "" + else + return handle:read("*all") or "" + end end -if not os.exec then os.exec = os.execute end -if not os.spawn then os.spawn = os.execute end - ---~ os.type : windows | unix (new, we already guessed os.platform) ---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +--~ os.type : windows | unix (new, we already guessed os.platform) +--~ os.name : windows | msdos | linux | macosx | solaris | .. 
| generic (new) +--~ os.platform : extended os.name with architecture if not io.fileseparator then if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows" + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" else - io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix" + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" end end -os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix" +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix = 'dll', 'exe' +else + os.libsuffix, os.binsuffix = 'so', '' +end function os.launch(str) - if os.platform == "windows" then + if os.type == "windows" then os.execute("start " .. str) -- os.spawn ? else os.execute(str .. " &") -- os.spawn ? end end -if not os.setenv then - function os.setenv() return false end -end - if not os.times then -- utime = user time -- stime = system time @@ -1618,64 +1862,218 @@ end --~ print(os.date("%H:%M:%S",os.gettimeofday())) --~ print(os.date("%H:%M:%S",os.time())) -os.arch = os.arch or function() - local a = os.resultof("uname -m") or "linux" - os.arch = function() - return a +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } + +local resolvers = os.resolvers + +local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil +local osix = osmt.__index + +osmt.__index = function(t,k) + return (resolvers[k] or osix)(t,k) +end + +setmetatable(os,osmt) + +if not os.setenv then + + -- we still store them but they won't be seen in + -- child processes although we might pass them some day + -- using command concatination + + local env, getenv = { }, os.getenv + + function os.setenv(k,v) + env[k] = v + end + + function os.getenv(k) + return env[k] or getenv(k) end - return a + end -local platform +-- we can use HOSTTYPE on some platforms -function os.currentplatform(name,default) - if not platform then - local name = os.name or os.platform or name -- os.name is built in, os.platform is mine - if not name then - platform = default or "linux" - elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then - if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then - platform = "mswin-64" - else - platform = "mswin" - end +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" else - local architecture = os.arch() - if name == "linux" then - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - elseif name 
== "macosx" then - if find(architecture,"i386") then - platform = "osx-intel" - else - platform = "osx-ppc" - end - elseif name == "sunos" then - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - elseif name == "freebsd" then - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - else - platform = default or name - end + platform = "mswin" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-64" + else + platform = "kfreebsd-i386" end - function os.currentplatform() - return platform + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + 
os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. +-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(os.date("%H")-os.date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) end + else + return 1 end - return platform end @@ -1685,7 +2083,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-file'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1696,14 +2094,17 @@ if not modules then modules = { } end modules ['l-file'] = { file = file or { } local concat = table.concat -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char +local lpegmatch = lpeg.match function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) end function file.addsuffix(filename, suffix) - if not find(filename,"%.[%a%d]+$") then + if not suffix or suffix == "" then + return filename + elseif not find(filename,"%.[%a%d]+$") then return filename .. "." .. suffix else return filename @@ -1726,20 +2127,39 @@ function file.nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$","")) end -function file.extname(name) - return match(name,"^.+%.([^/\\]-)$") or "" +function file.extname(name,default) + return match(name,"^.+%.([^/\\]-)$") or default or "" end file.suffix = file.extname ---~ print(file.join("x/","/y")) ---~ print(file.join("http://","/y")) ---~ print(file.join("http://a","/y")) ---~ print(file.join("http:///a","/y")) ---~ print(file.join("//nas-1","/y")) +--~ function file.join(...) +--~ local pth = concat({...},"/") +--~ pth = gsub(pth,"\\","/") +--~ local a, b = match(pth,"^(.*://)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ a, b = match(pth,"^(//)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ return (gsub(pth,"//+","/")) +--~ end + +local trick_1 = char(1) +local trick_2 = "^" .. trick_1 .. "/+" function file.join(...) - local pth = concat({...},"/") + local lst = { ... } + local a, b = lst[1], lst[2] + if a == "" then + lst[1] = trick_1 + elseif b and find(a,"^/+$") and find(b,"^/") then + lst[1] = "" + lst[2] = gsub(b,"^/+","") + end + local pth = concat(lst,"/") pth = gsub(pth,"\\","/") local a, b = match(pth,"^(.*://)(.*)$") if a and b then @@ -1749,17 +2169,28 @@ function file.join(...) 
if a and b then return a .. gsub(b,"//+","/") end + pth = gsub(pth,trick_2,"") return (gsub(pth,"//+","/")) end +--~ print(file.join("//","/y")) +--~ print(file.join("/","/y")) +--~ print(file.join("","/y")) +--~ print(file.join("/x/","/y")) +--~ print(file.join("x/","/y")) +--~ print(file.join("http://","/y")) +--~ print(file.join("http://a","/y")) +--~ print(file.join("http:///a","/y")) +--~ print(file.join("//nas-1","/y")) + function file.iswritable(name) local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,".")) - return a and a.permissions:sub(2,2) == "w" + return a and sub(a.permissions,2,2) == "w" end function file.isreadable(name) local a = lfs.attributes(name) - return a and a.permissions:sub(1,1) == "r" + return a and sub(a.permissions,1,1) == "r" end file.is_readable = file.isreadable @@ -1767,36 +2198,50 @@ file.is_writable = file.iswritable -- todo: lpeg -function file.split_path(str) - local t = { } - str = gsub(str,"\\", "/") - str = gsub(str,"(%a):([;/])", "%1\001%2") - for name in gmatch(str,"([^;:]+)") do - if name ~= "" then - t[#t+1] = gsub(name,"\001",":") - end - end - return t +--~ function file.split_path(str) +--~ local t = { } +--~ str = gsub(str,"\\", "/") +--~ str = gsub(str,"(%a):([;/])", "%1\001%2") +--~ for name in gmatch(str,"([^;:]+)") do +--~ if name ~= "" then +--~ t[#t+1] = gsub(name,"\001",":") +--~ end +--~ end +--~ return t +--~ end + +local checkedsplit = string.checkedsplit + +function file.split_path(str,separator) + str = gsub(str,"\\","/") + return checkedsplit(str,separator or io.pathseparator) end function file.join_path(tab) return concat(tab,io.pathseparator) -- can have trailing // end +-- we can hash them weakly + function file.collapse_path(str) - str = gsub(str,"/%./","/") - local n, m = 1, 1 - while n > 0 or m > 0 do - str, n = gsub(str,"[^/%.]+/%.%.$","") - str, m = gsub(str,"[^/%.]+/%.%./","") - end - str = gsub(str,"([^/])/$","%1") - str = gsub(str,"^%./","") - str = gsub(str,"/%.$","") + str = gsub(str,"\\","/") + if find(str,"/") then + str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified + str = gsub(str,"/%./","/") + local n, m = 1, 1 + while n > 0 or m > 0 do + str, n = gsub(str,"[^/%.]+/%.%.$","") + str, m = gsub(str,"[^/%.]+/%.%./","") + end + str = gsub(str,"([^/])/$","%1") + -- str = gsub(str,"^%./","") -- ./xx in qualified + str = gsub(str,"/%.$","") + end if str == "" then str = "." 
end return str end +--~ print(file.collapse_path("/a")) --~ print(file.collapse_path("a/./b/..")) --~ print(file.collapse_path("a/aa/../b/bb")) --~ print(file.collapse_path("a/../..")) @@ -1826,27 +2271,27 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1 --~ function file.extname(name) ---~ return pattern:match(name) or "" +--~ return lpegmatch(pattern,name) or "" --~ end --~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1) --~ function file.removesuffix(name) ---~ return pattern:match(name) +--~ return lpegmatch(pattern,name) --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1 --~ function file.basename(name) ---~ return pattern:match(name) or name +--~ return lpegmatch(pattern,name) or name --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1 --~ function file.dirname(name) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) +--~ return sub(name,1,p-2) --~ else --~ return "" --~ end @@ -1855,7 +2300,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.addsuffix(name, suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then --~ return name --~ else @@ -1866,9 +2311,9 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.replacesuffix(name,suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) .. "." .. suffix +--~ return sub(name,1,p-2) .. "." .. suffix --~ else --~ return name .. "." .. suffix --~ end @@ -1877,11 +2322,11 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1 --~ function file.nameonly(name) ---~ local a, b = pattern:match(name) +--~ local a, b = lpegmatch(pattern,name) --~ if b then ---~ return name:sub(a,b-2) +--~ return sub(name,a,b-2) --~ elseif a then ---~ return name:sub(a) +--~ return sub(name,a) --~ else --~ return name --~ end @@ -1915,11 +2360,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":") -- ./name ../name /name c: :// name/name function file.is_qualified_path(filename) - return qualified:match(filename) + return lpegmatch(qualified,filename) ~= nil end function file.is_rootbased_path(filename) - return rootbased:match(filename) + return lpegmatch(rootbased,filename) ~= nil end local slash = lpeg.S("\\/") @@ -1932,16 +2377,25 @@ local base = lpeg.C((1-suffix)^0) local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc("")) function file.splitname(str) -- returns drive, path, base, suffix - return pattern:match(str) + return lpegmatch(pattern,str) end --- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end -- -- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } -- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } -- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } -- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } +--~ -- todo: +--~ +--~ if os.type == "windows" then +--~ local currentdir = lfs.currentdir +--~ function lfs.currentdir() +--~ return 
(gsub(currentdir(),"\\","/")) +--~ end +--~ end + end -- of closure @@ -2006,7 +2460,7 @@ end function file.loadchecksum(name) if md5 then local data = io.loaddata(name .. ".md5") - return data and data:gsub("%s","") + return data and (gsub(data,"%s","")) end return nil end @@ -2025,19 +2479,168 @@ end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['l-dir'] = { +if not modules then modules = { } end modules ['l-url'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local char, gmatch, gsub = string.char, string.gmatch, string.gsub +local tonumber, type = tonumber, type +local lpegmatch = lpeg.match + +-- from the spec (on the web): +-- +-- foo://example.com:8042/over/there?name=ferret#nose +-- \_/ \______________/\_________/ \_________/ \__/ +-- | | | | | +-- scheme authority path query fragment +-- | _____________________|__ +-- / \ / \ +-- urn:example:animal:ferret:nose + +url = url or { } + +local function tochar(s) + return char(tonumber(s,16)) +end + +local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1) + +local hexdigit = lpeg.R("09","AF","af") +local plus = lpeg.P("+") +local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar) + +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) + +local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("") +local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("") +local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("") +local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("") +local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("") + +local parser = lpeg.Ct(scheme * authority * path * query * fragment) + +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + +function url.split(str) + return (type(str) == "string" and lpegmatch(parser,str)) or str +end + +-- todo: cache them + +function url.hashed(str) + local s = url.split(str) + local somescheme = s[1] ~= "" + return { + scheme = (somescheme and s[1]) or "file", + authority = s[2], + path = s[3], + query = s[4], + fragment = s[5], + original = str, + noscheme = not somescheme, + } +end + +function url.hasscheme(str) + return url.split(str)[1] ~= "" +end + +function url.addscheme(str,scheme) + return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str) +end + +function url.construct(hash) + local fullurl = hash.sheme .. "://".. hash.authority .. hash.path + if hash.query then + fullurl = fullurl .. "?".. hash.query + end + if hash.fragment then + fullurl = fullurl .. "?".. 
hash.fragment + end + return fullurl +end + +function url.filename(filename) + local t = url.hashed(filename) + return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename +end + +function url.query(str) + if type(str) == "string" then + local t = { } + for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do + t[k] = v + end + return t + else + return str + end +end + +--~ print(url.filename("file:///c:/oeps.txt")) +--~ print(url.filename("c:/oeps.txt")) +--~ print(url.filename("file:///oeps.txt")) +--~ print(url.filename("file:///etc/test.txt")) +--~ print(url.filename("/oeps.txt")) + +--~ from the spec on the web (sort of): +--~ +--~ function test(str) +--~ print(table.serialize(url.hashed(str))) +--~ end +--~ +--~ test("%56pass%20words") +--~ test("file:///c:/oeps.txt") +--~ test("file:///c|/oeps.txt") +--~ test("file:///etc/oeps.txt") +--~ test("file://./etc/oeps.txt") +--~ test("file:////etc/oeps.txt") +--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt") +--~ test("http://www.ietf.org/rfc/rfc2396.txt") +--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what") +--~ test("mailto:John.Doe@example.com") +--~ test("news:comp.infosystems.www.servers.unix") +--~ test("tel:+1-816-555-1212") +--~ test("telnet://192.0.2.16:80/") +--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2") +--~ test("/etc/passwords") +--~ test("http://www.pragma-ade.com/spaced%20name") + +--~ test("zip:///oeps/oeps.zip#bla/bla.tex") +--~ test("zip:///oeps/oeps.zip?bla/bla.tex") + + +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['l-dir'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- dir.expand_name will be merged with cleanpath and collapsepath + local type = type -local find, gmatch = string.find, string.gmatch +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local lpegmatch = lpeg.match dir = dir or { } +-- handy + +function dir.current() + return (gsub(lfs.currentdir(),"\\","/")) +end + -- optimizing for no string.find (*) does not save time local attributes = lfs.attributes @@ -2068,6 +2671,35 @@ end dir.glob_pattern = glob_pattern +local function collect_pattern(path,patt,recurse,result) + local ok, scanner + result = result or { } + if path == "/" then + ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner do + local full = path .. 
name + local attr = attributes(full) + local mode = attr.mode + if mode == 'file' then + if find(full,patt) then + result[name] = attr + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + attr.list = collect_pattern(full,patt,recurse) + result[name] = attr + end + end + end + return result +end + +dir.collect_pattern = collect_pattern + local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V local pattern = Ct { @@ -2087,29 +2719,48 @@ local filter = Cs ( ( )^0 ) local function glob(str,t) - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif lfs.isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,t) + end end - return t - elseif lfs.isfile(str) then - local t = t or { } - t[#t+1] = str - return t else - local split = pattern:match(str) - if split then + if type(str) == "table" then + local t = t or { } + for s=1,#str do + glob(str[s],t) + end + return t + elseif lfs.isfile(str) then local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = filter:match(start .. base) - glob_pattern(start,result,recurse,action) + t[#t+1] = str return t else - return { } + local split = lpegmatch(pattern,str) + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,action) + return t + else + return { } + end end end end @@ -2171,11 +2822,12 @@ end local make_indeed = true -- false -if string.find(os.getenv("PATH"),";") then +if string.find(os.getenv("PATH"),";") then -- os.type == "windows" function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2186,13 +2838,13 @@ if string.find(os.getenv("PATH"),";") then end local first, middle, last local drive = false - first, middle, last = str:match("^(//)(//*)(.*)$") + first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else - first, last = str:match("^(//)/*(.-)$") + first, last = match(str,"^(//)/*(.-)$") if first then - middle, last = str:match("([^/]+)/+(.-)$") + middle, last = match(str,"([^/]+)/+(.-)$") if middle then pth = "//" .. middle else @@ -2200,11 +2852,11 @@ if string.find(os.getenv("PATH"),";") then last = "" end else - first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$") + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") if first then pth, drive = first .. 
middle, true else - middle, last = str:match("^(/*)(.-)$") + middle, last = match(str,"^(/*)(.-)$") if not middle then last = str end @@ -2238,34 +2890,31 @@ if string.find(os.getenv("PATH"),";") then --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) - local first, nothing, last = str:match("^(//)(//*)(.*)$") + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") if first then - first = lfs.currentdir() .. "/" - first = first:gsub("\\","/") + first = dir.current() .. "/" end if not first then - first, last = str:match("^(//)/*(.*)$") + first, last = match(str,"^(//)/*(.*)$") end if not first then - first, last = str:match("^([a-zA-Z]:)(.*)$") + first, last = match(str,"^([a-zA-Z]:)(.*)$") if first and not find(last,"^/") then local d = lfs.currentdir() if lfs.chdir(first) then - first = lfs.currentdir() - first = first:gsub("\\","/") + first = dir.current() end lfs.chdir(d) end end if not first then - first, last = lfs.currentdir(), str - first = first:gsub("\\","/") + first, last = dir.current(), str end - last = last:gsub("//","/") - last = last:gsub("/%./","/") - last = last:gsub("^/*","") - first = first:gsub("/*$","") + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") if last == "" then return first else @@ -2276,8 +2925,9 @@ if string.find(os.getenv("PATH"),";") then else function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2286,7 +2936,7 @@ else end end end - str = str:gsub("/+","/") + str = gsub(str,"/+","/") if find(str,"^/") then pth = "/" for s in gmatch(str,"[^/]+") do @@ -2320,12 +2970,12 @@ else --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath if not find(str,"^/") then str = lfs.currentdir() .. "/" .. 
str end - str = str:gsub("//","/") - str = str:gsub("/%./","/") + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") return str end @@ -2340,7 +2990,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-boolean'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2401,7 +3051,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-math'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2448,7 +3098,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-utils'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2456,6 +3106,10 @@ if not modules then modules = { } end modules ['l-utils'] = { -- hm, quite unreadable +local gsub = string.gsub +local concat = table.concat +local type, next = type, next + if not utils then utils = { } end if not utils.merger then utils.merger = { } end if not utils.lua then utils.lua = { } end @@ -2493,7 +3147,7 @@ function utils.merger._self_load_(name) end if data and utils.merger.strip_comment then -- saves some 20K - data = data:gsub("%-%-~[^\n\r]*[\r\n]", "") + data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "") end return data or "" end @@ -2511,7 +3165,7 @@ end function utils.merger._self_swap_(data,code) if data ~= "" then - return (data:gsub(utils.merger.pattern, function(s) + return (gsub(data,utils.merger.pattern, function(s) return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n" end, 1)) else @@ -2521,8 +3175,8 @@ end --~ stripper: --~ ---~ data = string.gsub(data,"%-%-~[^\n]*\n","") ---~ data = string.gsub(data,"\n\n+","\n") +--~ data = gsub(data,"%-%-~[^\n]*\n","") +--~ data = gsub(data,"\n\n+","\n") function utils.merger._self_libs_(libs,list) local result, f, frozen = { }, nil, false @@ -2530,9 +3184,10 @@ function utils.merger._self_libs_(libs,list) if type(libs) == 'string' then libs = { libs } end if type(list) == 'string' then list = { list } end local foundpath = nil - for _, lib in ipairs(libs) do - for _, pth in ipairs(list) do - pth = string.gsub(pth,"\\","/") -- file.clean_path + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path utils.report("checking library path %s",pth) local name = pth .. "/" .. lib if lfs.isfile(name) then @@ -2544,7 +3199,8 @@ function utils.merger._self_libs_(libs,list) if foundpath then utils.report("using library path %s",foundpath) local right, wrong = { }, { } - for _, lib in ipairs(libs) do + for i=1,#libs do + local lib = libs[i] local fullname = foundpath .. "/" .. 
lib if lfs.isfile(fullname) then -- right[#right+1] = lib @@ -2558,15 +3214,15 @@ function utils.merger._self_libs_(libs,list) end end if #right > 0 then - utils.report("merged libraries: %s",table.concat(right," ")) + utils.report("merged libraries: %s",concat(right," ")) end if #wrong > 0 then - utils.report("skipped libraries: %s",table.concat(wrong," ")) + utils.report("skipped libraries: %s",concat(wrong," ")) end else utils.report("no valid library path found") end - return table.concat(result, "\n\n") + return concat(result, "\n\n") end function utils.merger.selfcreate(libs,list,target) @@ -2624,16 +3280,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. end + aux = aux or { } local concat, format, gmatch = table.concat, string.format, string.gmatch local tostring, type = tostring, type +local lpegmatch = lpeg.match + +local P, R, V = lpeg.P, lpeg.R, lpeg.V + +local escape, left, right = P("\\"), P('{'), P('}') + +lpeg.patterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} local space = lpeg.P(' ') local equal = lpeg.P("=") @@ -2641,7 +3309,7 @@ local comma = lpeg.P(",") local lbrace = lpeg.P("{") local rbrace = lpeg.P("}") local nobrace = 1 - (lbrace+rbrace) -local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace } +local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace } local spaces = space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) @@ -2679,13 +3347,13 @@ function aux.make_settings_to_hash_pattern(set,how) end end -function aux.settings_to_hash(str) +function aux.settings_to_hash(str,existing) if str and str ~= "" then - hash = { } + hash = existing or { } if moretolerant then - pattern_b_s:match(str) + lpegmatch(pattern_b_s,str) else - pattern_a_s:match(str) + lpegmatch(pattern_a_s,str) end return hash else @@ -2693,39 +3361,41 @@ function aux.settings_to_hash(str) end end -function aux.settings_to_hash_tolerant(str) +function aux.settings_to_hash_tolerant(str,existing) if str and str ~= "" then - hash = { } - pattern_b_s:match(str) + hash = existing or { } + lpegmatch(pattern_b_s,str) return hash else return { } end end -function aux.settings_to_hash_strict(str) +function aux.settings_to_hash_strict(str,existing) if str and str ~= "" then - hash = { } - pattern_c_s:match(str) + hash = existing or { } + lpegmatch(pattern_c_s,str) return next(hash) and hash else return nil end end -local seperator = comma * space^0 +local separator = comma * space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) -local pattern = lpeg.Ct(value*(seperator*value)^0) +local pattern = lpeg.Ct(value*(separator*value)^0) -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored aux.settings_to_array_pattern = pattern +-- we could use a weak table as cache + function aux.settings_to_array(str) if not str or str == "" then return { } else - return pattern:match(str) + return lpegmatch(pattern,str) end end @@ -2734,10 +3404,10 @@ local function set(t,v) end local value = lpeg.P(lpeg.Carg(1)*value) / set -local 
pattern = value*(seperator*value)^0 * lpeg.Carg(1) +local pattern = value*(separator*value)^0 * lpeg.Carg(1) function aux.add_settings_to_array(t,str) - return pattern:match(str, nil, t) + return lpegmatch(pattern,str,nil,t) end function aux.hash_to_string(h,separator,yes,no,strict,omit) @@ -2785,6 +3455,13 @@ function aux.settings_to_set(str,t) return t end +local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace +local pattern = lpeg.Ct((space + value)^0) + +function aux.arguments_to_table(str) + return lpegmatch(pattern,str) +end + -- temporary here function aux.getparameters(self,class,parentclass,settings) @@ -2793,36 +3470,31 @@ function aux.getparameters(self,class,parentclass,settings) sc = table.clone(self[parent]) self[class] = sc end - aux.add_settings_to_array(sc, settings) + aux.settings_to_hash(settings,sc) end -- temporary here -local digit = lpeg.R("09") -local period = lpeg.P(".") -local zero = lpeg.P("0") - ---~ local finish = lpeg.P(-1) ---~ local nodigit = (1-digit) + finish ---~ local case_1 = (period * zero^1 * #nodigit)/"" -- .000 ---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100 - +local digit = lpeg.R("09") +local period = lpeg.P(".") +local zero = lpeg.P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") - -local number = digit^1 * (case_1 + case_2) -local stripper = lpeg.Cs((number + 1)^0) +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = lpeg.Cs((number + 1)^0) --~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --~ collectgarbage("collect") --~ str = string.rep(sample,10000) --~ local ts = os.clock() ---~ stripper:match(str) ---~ print(#str, os.clock()-ts, stripper:match(sample)) +--~ lpegmatch(stripper,str) +--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +lpeg.patterns.strip_zeros = stripper function aux.strip_zeros(str) - return stripper:match(str) + return lpegmatch(stripper,str) end function aux.definetable(target) -- defines undefined tables @@ -2846,85 +3518,433 @@ function aux.accesstable(target) return t end +--~ function string.commaseparated(str) +--~ return gmatch(str,"([^,%s]+)") +--~ end + +-- as we use this a lot ... + +--~ function aux.cachefunction(action,weak) +--~ local cache = { } +--~ if weak then +--~ setmetatable(cache, { __mode = "kv" } ) +--~ end +--~ local function reminder(str) +--~ local found = cache[str] +--~ if not found then +--~ found = action(str) +--~ cache[str] = found +--~ end +--~ return found +--~ end +--~ return reminder, cache +--~ end + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['lxml-tab'] = { +if not modules then modules = { } end modules ['trac-tra'] = { version = 1.001, - comment = "this module is the basis for the lxml-* ones", + comment = "companion to trac-tra.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } ---[[ldx-- -

The parser used here is inspired by the variant discussed in the lua book, but -handles comment and processing instructions, has a different structure, provides -parent access; a first version used different trickery but was less optimized so we -went this route. First we had a find based parser, now we have an lpeg based one. -The find based parser can be found in l-xml-edu.lua along with other older code.

- -

Especially the lpath code is experimental, we will support some of xpath, but -only things that make sense for us; as compensation it is possible to hook in your -own functions. Apart from preprocessing content for ConTeXt we also need -this module for process management, like handling ctx and -rlx files.

- - -a/b/c /*/c -a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) -a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) - - -

Beware, the interface may change. For instance at, ns, tg, dt may get more -verbose names. Once the code is stable we will also remove some tracing and -optimize the code.

---ldx]]-- +-- the tag is kind of generic and used for functions that are not +-- bound to a variable, like node.new, node.copy etc (contrary to for instance +-- node.has_attribute which is bound to a has_attribute local variable in mkiv) -xml = xml or { } +local debug = require "debug" ---~ local xml = xml +local getinfo = debug.getinfo +local type, next = type, next +local concat = table.concat +local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub -local concat, remove, insert = table.concat, table.remove, table.insert -local type, next, setmetatable = type, next, setmetatable -local format, lower, find = string.format, string.lower, string.find +debugger = debugger or { } ---[[ldx-- -

This module can be used stand alone but also inside MkIV in -which case it hooks into the tracker code. Therefore we provide a few -functions that set the tracers.

---ldx]]-- +local counters = { } +local names = { } -local trace_remap = false +-- one -if trackers then - trackers.register("xml.remap", function(v) trace_remap = v end) +local function hook() + local f = getinfo(2,"f").func + local n = getinfo(2,"Sn") +-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end + if f then + local cf = counters[f] + if cf == nil then + counters[f] = 1 + names[f] = n + else + counters[f] = cf + 1 + end + end end - -function xml.settrace(str,value) - if str == "remap" then - trace_remap = value or false +local function getname(func) + local n = names[func] + if n then + if n.what == "C" then + return n.name or '' + else + -- source short_src linedefined what name namewhat nups func + local name = n.name or n.namewhat or n.what + if not name or name == "" then name = "?" end + return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name) + end + else + return "unknown" + end +end +function debugger.showstats(printer,threshold) + printer = printer or texio.write or print + threshold = threshold or 0 + local total, grandtotal, functions = 0, 0, 0 + printer("\n") -- ugly but ok + -- table.sort(counters) + for func, count in next, counters do + if count > threshold then + local name = getname(func) + if not find(name,"for generator") then + printer(format("%8i %s", count, name)) + total = total + count + end + end + grandtotal = grandtotal + count + functions = functions + 1 end + printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) end ---[[ldx-- -

First a hack to enable namespace resolving. A namespace is characterized by -a URL. The following function associates a namespace prefix with a -pattern. We use lpeg, which in this case is more than twice as fast as a -find based solution where we loop over an array of patterns. Less code and -much cleaner.

---ldx]]-- +-- two -xml.xmlns = xml.xmlns or { } +--~ local function hook() +--~ local n = getinfo(2) +--~ if n.what=="C" and not n.name then +--~ local f = tostring(debug.traceback()) +--~ local cf = counters[f] +--~ if cf == nil then +--~ counters[f] = 1 +--~ names[f] = n +--~ else +--~ counters[f] = cf + 1 +--~ end +--~ end +--~ end +--~ function debugger.showstats(printer,threshold) +--~ printer = printer or texio.write or print +--~ threshold = threshold or 0 +--~ local total, grandtotal, functions = 0, 0, 0 +--~ printer("\n") -- ugly but ok +--~ -- table.sort(counters) +--~ for func, count in next, counters do +--~ if count > threshold then +--~ printer(format("%8i %s", count, func)) +--~ total = total + count +--~ end +--~ grandtotal = grandtotal + count +--~ functions = functions + 1 +--~ end +--~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) +--~ end -local check = lpeg.P(false) -local parse = check +-- rest ---[[ldx-- -

The next function associates a namespace prefix with an URL. This -normally happens independent of parsing.

+function debugger.savestats(filename,threshold) + local f = io.open(filename,'w') + if f then + debugger.showstats(function(str) f:write(str) end,threshold) + f:close() + end +end + +function debugger.enable() + debug.sethook(hook,"c") +end + +function debugger.disable() + debug.sethook() +--~ counters[debug.getinfo(2,"f").func] = nil +end + +function debugger.tracing() + local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0 + if n > 0 then + function debugger.tracing() return true end ; return true + else + function debugger.tracing() return false end ; return false + end +end + +--~ debugger.enable() + +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) + +--~ debugger.disable() + +--~ print("") +--~ debugger.showstats() +--~ print("") +--~ debugger.showstats(print,3) + +setters = setters or { } +setters.data = setters.data or { } + +--~ local function set(t,what,value) +--~ local data, done = t.data, t.done +--~ if type(what) == "string" then +--~ what = aux.settings_to_array(what) -- inefficient but ok +--~ end +--~ for i=1,#what do +--~ local w = what[i] +--~ for d, f in next, data do +--~ if done[d] then +--~ -- prevent recursion due to wildcards +--~ elseif find(d,w) then +--~ done[d] = true +--~ for i=1,#f do +--~ f[i](value) +--~ end +--~ end +--~ end +--~ end +--~ end + +local function set(t,what,value) + local data, done = t.data, t.done + if type(what) == "string" then + what = aux.settings_to_hash(what) -- inefficient but ok + end + for w, v in next, what do + if v == "" then + v = value + else + v = toboolean(v) + end + for d, f in next, data do + if done[d] then + -- prevent recursion due to wildcards + elseif find(d,w) then + done[d] = true + for i=1,#f do + f[i](v) + end + end + end + end +end + +local function reset(t) + for d, f in next, t.data do + for i=1,#f do + f[i](false) + end + end +end + +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data + what = lower(what) + local w = data[what] + if not w then + w = { } + data[what] = w + end + for _, fnc in next, { ... 
} do + local typ = type(fnc) + if typ == "function" then + w[#w+1] = fnc + elseif typ == "string" then + w[#w+1] = function(value) set(t,fnc,value,nesting) end + end + end +end + +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,string.simpleesc(tostring(what))) + t.enable, t.done = e, { } +end + +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,string.simpleesc(tostring(what))) + t.disable, t.done = e, { } +end + +function setters.reset(t) + t.done = { } + reset(t) +end + +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) + local user, system = { }, { } + for l=1,#list do + local what = list[l] + if find(what,"^%*") then + system[#system+1] = what + else + user[#user+1] = what + end + end + return user, system +end + +function setters.show(t) + commands.writestatus("","") + local list = setters.list(t) + for k=1,#list do + commands.writestatus(t.name,list[k]) + end + commands.writestatus("","") +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module + +function setters.new(name) + local t + t = { + data = { }, + name = name, + enable = function(...) setters.enable (t,...) end, + disable = function(...) setters.disable (t,...) end, + register = function(...) setters.register(t,...) end, + list = function(...) setters.list (t,...) end, + show = function(...) setters.show (t,...) end, + } + setters.data[name] = t + return t +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +local e = directives.enable +local d = directives.disable + +function directives.enable(...) + commands.writestatus("directives","enabling: %s",concat({...}," ")) + e(...) +end + +function directives.disable(...) + commands.writestatus("directives","disabling: %s",concat({...}," ")) + d(...) +end + +local e = experiments.enable +local d = experiments.disable + +function experiments.enable(...) + commands.writestatus("experiments","enabling: %s",concat({...}," ")) + e(...) +end + +function experiments.disable(...) + commands.writestatus("experiments","disabling: %s",concat({...}," ")) + d(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + statistics.enable = not v +end) + + + +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['lxml-tab'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc +-- stripping spaces from e.g. 
cont-en.xml saves .2 sec runtime so it's not worth the +-- trouble + +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) + +--[[ldx-- +

The parser used here is inspired by the variant discussed in the lua book, but +handles comment and processing instructions, has a different structure, provides +parent access; a first version used different trickery but was less optimized so we +went this route. First we had a find based parser, now we have an lpeg based one. +The find based parser can be found in l-xml-edu.lua along with other older code.

+ +

Beware, the interface may change. For instance at, ns, tg, dt may get more +verbose names. Once the code is stable we will also remove some tracing and +optimize the code.

+--ldx]]-- + +xml = xml or { } + +--~ local xml = xml + +local concat, remove, insert = table.concat, table.remove, table.insert +local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber +local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub +local utfchar = unicode.utf8.char +local lpegmatch = lpeg.match +local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs + +--[[ldx-- +

First a hack to enable namespace resolving. A namespace is characterized by +a URL. The following function associates a namespace prefix with a +pattern. We use lpeg, which in this case is more than twice as fast as a +find based solution where we loop over an array of patterns. Less code and +much cleaner.
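A quick usage sketch of how the three calls below cooperate (the url is only an example):

xml.registerns("mml","mathml")                        -- prefix "mml" matches any url containing "mathml"
xml.checkns("m","http://www.w3.org/1998/Math/MathML") -- remaps the document prefix "m" onto "mml"
xml.resolvens("http://www.w3.org/1998/Math/MathML")   -- returns "mml"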

+--ldx]]-- + +xml.xmlns = xml.xmlns or { } + +local check = P(false) +local parse = check + +--[[ldx-- +

The next function associates a namespace prefix with an URL. This +normally happens independent of parsing.

xml.registerns("mml","mathml") @@ -2932,8 +3952,8 @@ xml.registerns("mml","mathml") --ldx]]-- function xml.registerns(namespace, pattern) -- pattern can be an lpeg - check = check + lpeg.C(lpeg.P(lower(pattern))) / namespace - parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) } + check = check + C(P(lower(pattern))) / namespace + parse = P { P(check) + 1 * V(1) } end --[[ldx-- @@ -2947,7 +3967,7 @@ xml.checkns("m","http://www.w3.org/mathml") --ldx]]-- function xml.checkns(namespace,url) - local ns = parse:match(lower(url)) + local ns = lpegmatch(parse,lower(url)) if ns and namespace ~= ns then xml.xmlns[namespace] = ns end @@ -2965,7 +3985,7 @@ This returns mml. --ldx]]-- function xml.resolvens(url) - return parse:match(lower(url)) or "" + return lpegmatch(parse,lower(url)) or "" end --[[ldx-- @@ -3004,27 +4024,36 @@ local x = xml.convert(somestring)

An optional second boolean argument tells this function not to create a root element.
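In the rewritten converter that boolean has become a field in a settings table; a small sketch (the test string is made up):

xml.convert("<a><b/></a>")                     -- result is wrapped in a special '@rt@' root element
xml.convert("<a><b/></a>", { no_root = true }) -- result is returned without that wrapper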

---ldx]]-- -xml.strip_cm_and_dt = false -- an extra global flag, in case we have many includes +

Valid entities are:

+ + + + + + +--ldx]]-- -- not just one big nested table capture (lpeg overflow) local nsremap, resolvens = xml.xmlns, xml.resolvens -local stack, top, dt, at, xmlns, errorstr, entities = {}, {}, {}, {}, {}, nil, {} +local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { } +local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false +local dcache, hcache, acache = { }, { }, { } -local mt = { __tostring = xml.text } +local mt = { } -function xml.check_error(top,toclose) - return "" +function initialize_mt(root) + mt = { __index = root } -- will be redefined later end -local strip = false -local cleanup = false +function xml.setproperty(root,k,v) + getmetatable(root).__index[k] = v +end -function xml.set_text_cleanup(fnc) - cleanup = fnc +function xml.check_error(top,toclose) + return "" end local function add_attribute(namespace,tag,value) @@ -3034,12 +4063,31 @@ local function add_attribute(namespace,tag,value) if tag == "xmlns" then xmlns[#xmlns+1] = resolvens(value) at[tag] = value + elseif namespace == "" then + at[tag] = value elseif namespace == "xmlns" then xml.checkns(tag,value) at["xmlns:" .. tag] = value else - at[tag] = value + -- for the moment this way: + at[namespace .. ":" .. tag] = value + end +end + +local function add_empty(spacing, namespace, tag) + if #spacing > 0 then + dt[#dt+1] = spacing + end + local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace + top = stack[#stack] + dt = top.dt + local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } + dt[#dt+1] = t + setmetatable(t, mt) + if at.xmlns then + remove(xmlns) end + at = { } end local function add_begin(spacing, namespace, tag) @@ -3067,28 +4115,12 @@ local function add_end(spacing, namespace, tag) end dt = top.dt dt[#dt+1] = toclose - dt[0] = top + -- dt[0] = top -- nasty circular reference when serializing table if toclose.at.xmlns then remove(xmlns) end end -local function add_empty(spacing, namespace, tag) - if #spacing > 0 then - dt[#dt+1] = spacing - end - local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace - top = stack[#stack] - dt = top.dt - local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } - dt[#dt+1] = t - setmetatable(t, mt) - if at.xmlns then - remove(xmlns) - end - at = { } -end - local function add_text(text) if cleanup and #text > 0 then dt[#dt+1] = cleanup(text) @@ -3104,7 +4136,7 @@ local function add_special(what, spacing, text) if strip and (what == "@cm@" or what == "@dt@") then -- forget it else - dt[#dt+1] = { special=true, ns="", tg=what, dt={text} } + dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } } end end @@ -3112,42 +4144,260 @@ local function set_message(txt) errorstr = "garbage at the end of the file: " .. 
gsub(txt,"([ \n\r\t]*)","") end -local P, S, R, C, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V +local reported_attribute_errors = { } -local space = S(' \r\n\t') -local open = P('<') -local close = P('>') -local squote = S("'") -local dquote = S('"') -local equal = P('=') -local slash = P('/') -local colon = P(':') -local valid = R('az', 'AZ', '09') + S('_-.') -local name_yes = C(valid^1) * colon * C(valid^1) -local name_nop = C(P(true)) * C(valid^1) -local name = name_yes + name_nop +local function attribute_value_error(str) + if not reported_attribute_errors[str] then + logs.report("xml","invalid attribute value: %q",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end +local function attribute_specification_error(str) + if not reported_attribute_errors[str] then + logs.report("xml","invalid attribute specification: %q",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end -local utfbom = P('\000\000\254\255') + P('\255\254\000\000') + - P('\255\254') + P('\254\255') + P('\239\187\191') -- no capture +function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or format("&%s;",str) end +function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end +function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end -local spacing = C(space^0) -local justtext = C((1-open)^1) -local somespace = space^1 -local optionalspace = space^0 +local function fromhex(s) + local n = tonumber(s,16) + if n then + return utfchar(n) + else + return format("h:%s",s), true + end +end -local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -local attribute = (somespace * name * optionalspace * equal * optionalspace * value) / add_attribute -local attributes = attribute^0 +local function fromdec(s) + local n = tonumber(s) + if n then + return utfchar(n) + else + return format("d:%s",s), true + end +end -local text = justtext / add_text -local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example +-- one level expansion (simple case), no checking done -local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty -local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin -local endelement = (spacing * open * slash * name * optionalspace * close) / add_end +local rest = (1-P(";"))^0 +local many = P(1)^0 -local begincomment = open * P("!--") -local endcomment = P("--") * close +local parsedentity = + P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) + + (P("#x")*(many/fromhex) + P("#")*(many/fromdec)) + +-- parsing in the xml file + +local predefined_unified = { + [38] = "&", + [42] = """, + [47] = "'", + [74] = "<", + [76] = "&gr;", +} + +local predefined_simplified = { + [38] = "&", amp = "&", + [42] = '"', quot = '"', + [47] = "'", apos = "'", + [74] = "<", lt = "<", + [76] = ">", gt = ">", +} + +local function handle_hex_entity(str) + local h = hcache[str] + if not h then + local n = tonumber(str,16) + h = unify_predefined and predefined_unified[n] + if h then + if trace_entities then + logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h) + end + elseif utfize then + h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or "" + if not n then + logs.report("xml","utfize, ignoring hex entity &#x%s;",str) + elseif trace_entities then + logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h) + 
end + else + if trace_entities then + logs.report("xml","found entity &#x%s;",str) + end + h = "&#x" .. str .. ";" + end + hcache[str] = h + end + return h +end + +local function handle_dec_entity(str) + local d = dcache[str] + if not d then + local n = tonumber(str) + d = unify_predefined and predefined_unified[n] + if d then + if trace_entities then + logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d) + end + elseif utfize then + d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or "" + if not n then + logs.report("xml","utfize, ignoring dec entity &#%s;",str) + elseif trace_entities then + logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h) + end + else + if trace_entities then + logs.report("xml","found entity &#%s;",str) + end + d = "&#" .. str .. ";" + end + dcache[str] = d + end + return d +end + +xml.parsedentitylpeg = parsedentity + +local function handle_any_entity(str) + if resolve then + local a = acache[str] -- per instance ! todo + if not a then + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + elseif type(resolve) == "function" then + a = resolve(str) or entities[str] + else + a = entities[str] + end + if a then + if trace_entities then + logs.report("xml","resolved entity &%s; -> %s (internal)",str,a) + end + a = lpegmatch(parsedentity,a) or a + else + if xml.unknown_any_entity_format then + a = xml.unknown_any_entity_format(str) or "" + end + if a then + if trace_entities then + logs.report("xml","resolved entity &%s; -> %s (external)",str,a) + end + else + if trace_entities then + logs.report("xml","keeping entity &%s;",str) + end + if str == "" then + a = "&error;" + else + a = "&" .. str .. ";" + end + end + end + acache[str] = a + elseif trace_entities then + if not acache[str] then + logs.report("xml","converting entity &%s; into %s",str,a) + acache[str] = a + end + end + return a + else + local a = acache[str] + if not a then + if trace_entities then + logs.report("xml","found entity &%s;",str) + end + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + acache[str] = a + elseif str == "" then + a = "&error;" + acache[str] = a + else + a = "&" .. str .. 
";" + acache[str] = a + end + end + return a + end +end + +local function handle_end_entity(chr) + logs.report("xml","error in entity, %q found instead of ';'",chr) +end + +local space = S(' \r\n\t') +local open = P('<') +local close = P('>') +local squote = S("'") +local dquote = S('"') +local equal = P('=') +local slash = P('/') +local colon = P(':') +local semicolon = P(';') +local ampersand = P('&') +local valid = R('az', 'AZ', '09') + S('_-.') +local name_yes = C(valid^1) * colon * C(valid^1) +local name_nop = C(P(true)) * C(valid^1) +local name = name_yes + name_nop +local utfbom = lpeg.patterns.utfbom -- no capture +local spacing = C(space^0) + +----- entitycontent = (1-open-semicolon)^0 +local anyentitycontent = (1-open-semicolon-space-close)^0 +local hexentitycontent = R("AF","af","09")^0 +local decentitycontent = R("09")^0 +local parsedentity = P("#")/"" * ( + P("x")/"" * (hexentitycontent/handle_hex_entity) + + (decentitycontent/handle_dec_entity) + ) + (anyentitycontent/handle_any_entity) +local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity)) + +local text_unparsed = C((1-open)^1) +local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1) + +local somespace = space^1 +local optionalspace = space^0 + +----- value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value +local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value + +local endofattributes = slash * close + close -- recovery of flacky html +local whatever = space * name * optionalspace * equal +local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error +----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error +----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error +local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error + +local attributevalue = value + wrongvalue + +local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute +----- attributes = (attribute)^0 + +local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0 + +local parsedtext = text_parsed / add_text +local unparsedtext = text_unparsed / add_text +local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example + +local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty +local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin +local endelement = (spacing * open * slash * name * optionalspace * close) / add_end + +local begincomment = open * P("!--") +local endcomment = P("--") * close local begininstruction = open * P("?") local endinstruction = P("?") * close local begincdata = open * P("![CDATA[") @@ -3157,19 +4407,27 @@ local someinstruction = C((1 - endinstruction)^0) local somecomment = C((1 - endcomment )^0) local somecdata = C((1 - endcdata )^0) -local function entity(k,v) entities[k] = v end +local function normalentity(k,v ) entities[k] = v end +local function systementity(k,v,n) entities[k] = v end +local function publicentity(k,v,n) entities[k] = v end local begindoctype = open * P("!DOCTYPE") local enddoctype = close local beginset = P("[") local endset = P("]") -local doctypename = C((1-somespace)^0) +local doctypename 
= C((1-somespace-close)^0) local elementdoctype = optionalspace * P(" & + cleanup = settings.text_cleanup + stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { } + acache, hcache, dcache = { }, { }, { } -- not stored + reported_attribute_errors = { } + if settings.parent_root then + mt = getmetatable(settings.parent_root) + else + initialize_mt(top) + end stack[#stack+1] = top top.dt = { } dt = top.dt if not data or data == "" then errorstr = "empty xml file" - elseif not grammar:match(data) then - errorstr = "invalid xml file" + elseif utfize or resolve then + if lpegmatch(grammar_parsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - parsed text" + end + elseif type(data) == "string" then + if lpegmatch(grammar_unparsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - unparsed text" + end else - errorstr = "" + errorstr = "invalid xml file - no text at all" end if errorstr and errorstr ~= "" then - result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true } + result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } } setmetatable(stack, mt) - if xml.error_handler then xml.error_handler("load",errorstr) end + local error_handler = settings.error_handler + if error_handler == false then + -- no error message + else + error_handler = error_handler or xml.error_handler + if error_handler then + xml.error_handler("load",errorstr) + end + end else result = stack[1] end - if not no_root then - result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={}, entities = entities } + if not settings.no_root then + result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings } setmetatable(result, mt) local rdt = result.dt for k=1,#rdt do local v = rdt[k] if type(v) == "table" and not v.special then -- always table -) result.ri = k -- rootindex +v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this ! break end end end + if errorstr and errorstr ~= "" then + result.error = true + end return result end +xml.convert = xmlconvert + +function xml.inheritedconvert(data,xmldata) + local settings = xmldata.settings + settings.parent_root = xmldata -- to be tested + -- settings.no_root = true + local xc = xmlconvert(data,settings) + -- xc.settings = nil + -- xc.entities = nil + -- xc.special = nil + -- xc.ri = nil + -- print(xc.tg) + return xc +end + --[[ldx--

Packaging data in an xml like table is done with the following function. Maybe it will go away (when not used).
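A usage sketch (the tag and attribute are invented values):

local e = xml.package("mml:math", { display = "block" }, { })
-- e.ns == "mml", e.tg == "math", e.at.display == "block"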

@@ -3243,7 +4557,7 @@ function xml.is_valid(root) end function xml.package(tag,attributes,data) - local ns, tg = tag:match("^(.-):?([^:]+)$") + local ns, tg = match(tag,"^(.-):?([^:]+)$") local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} } setmetatable(t, mt) return t @@ -3261,21 +4575,19 @@ the whole file first. The function accepts a string representing a filename or a file handle.
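A small sketch, assuming some file test.xml is present:

local root = xml.load("test.xml")          -- a filename ...
local same = xml.load(io.open("test.xml")) -- ... or an already opened file handle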

--ldx]]-- -function xml.load(filename) +function xml.load(filename,settings) + local data = "" if type(filename) == "string" then + -- local data = io.loaddata(filename) - -todo: check type in io.loaddata local f = io.open(filename,'r') if f then - local root = xml.convert(f:read("*all")) + data = f:read("*all") f:close() - return root - else - return xml.convert("") end elseif filename then -- filehandle - return xml.convert(filename:read("*all")) - else - return xml.convert("") + data = filename:read("*all") end + return xmlconvert(data,settings) end --[[ldx-- @@ -3283,9 +4595,11 @@ end valid trees, which is what the next function does.

--ldx]]-- +local no_root = { no_root = true } + function xml.toxml(data) if type(data) == "string" then - local root = { xml.convert(data,true) } + local root = { xmlconvert(data,no_root) } return (#root > 1 and root) or root[1] else return data @@ -3305,7 +4619,7 @@ local function copy(old,tables) if not tables[old] then tables[old] = new end - for k,v in pairs(old) do + for k,v in next, old do new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v end local mt = getmetatable(old) @@ -3330,175 +4644,12 @@ alternative.

-- todo: add when not present -local fallbackhandle = (tex and tex.sprint) or io.write - -local function serialize(e, handle, textconverter, attributeconverter, specialconverter, nocommands) - if not e then - return - elseif not nocommands then - local ec = e.command - if ec ~= nil then -- we can have all kind of types - if e.special then - local etg, edt = e.tg, e.dt - local spc = specialconverter and specialconverter[etg] - if spc then - local result = spc(edt[1]) - if result then - handle(result) - return - else - -- no need to handle any further - end - end - end - local xc = xml.command - if xc then - xc(e,ec) - return - end - end - end - handle = handle or fallbackhandle - local etg = e.tg - if etg then - if e.special then - local edt = e.dt - local spc = specialconverter and specialconverter[etg] - if spc then - local result = spc(edt[1]) - if result then - handle(result) - else - -- no need to handle any further - end - elseif etg == "@pi@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@cm@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@cd@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@dt@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@rt@" then - serialize(edt,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - else - local ens, eat, edt, ern = e.ns, e.at, e.dt, e.rn - local ats = eat and next(eat) and { } -- type test maybe faster - if ats then - if attributeconverter then - for k,v in next, eat do - ats[#ats+1] = format('%s=%q',k,attributeconverter(v)) - end - else - for k,v in next, eat do - ats[#ats+1] = format('%s=%q',k,v) - end - end - end - if ern and trace_remap and ern ~= ens then - ens = ern - end - if ens ~= "" then - if edt and #edt > 0 then - if ats then - -- handle(format("<%s:%s %s>",ens,etg,concat(ats," "))) - handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. ">") - else - -- handle(format("<%s:%s>",ens,etg)) - handle("<" .. ens .. ":" .. etg .. ">") - end - for i=1,#edt do - local e = edt[i] - if type(e) == "string" then - if textconverter then - handle(textconverter(e)) - else - handle(e) - end - else - serialize(e,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - -- handle(format("",ens,etg)) - handle("") - else - if ats then - -- handle(format("<%s:%s %s/>",ens,etg,concat(ats," "))) - handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. "/>") - else - -- handle(format("<%s:%s/>",ens,etg)) - handle("<" .. ens .. ":" .. etg .. "/>") - end - end - else - if edt and #edt > 0 then - if ats then - -- handle(format("<%s %s>",etg,concat(ats," "))) - handle("<" .. etg .. " " .. concat(ats," ") .. ">") - else - -- handle(format("<%s>",etg)) - handle("<" .. etg .. ">") - end - for i=1,#edt do - local ei = edt[i] - if type(ei) == "string" then - if textconverter then - handle(textconverter(ei)) - else - handle(ei) - end - else - serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - -- handle(format("",etg)) - handle("") - else - if ats then - -- handle(format("<%s %s/>",etg,concat(ats," "))) - handle("<" .. etg .. " " .. concat(ats," ") .. "/>") - else - -- handle(format("<%s/>",etg)) - handle("<" .. etg .. 
"/>") - end - end - end - end - elseif type(e) == "string" then - if textconverter then - handle(textconverter(e)) - else - handle(e) - end - else - for i=1,#e do - local ei = e[i] - if type(ei) == "string" then - if textconverter then - handle(textconverter(ei)) - else - handle(ei) - end - else - serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - end -end - -xml.serialize = serialize - function xml.checkbom(root) -- can be made faster if root.ri then local dt, found = root.dt, false for k=1,#dt do local v = dt[k] - if type(v) == "table" and v.special and v.tg == "@pi" and find(v.dt,"xml.*version=") then + if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then found = true break end @@ -3515,111 +4666,356 @@ end and then handle the lot.

--ldx]]-- -function xml.tostring(root) -- 25% overhead due to collecting - if root then - if type(root) == 'string' then - return root - elseif next(root) then -- next is faster than type (and >0 test) - local result = { } - serialize(root,function(s) result[#result+1] = s end) -- brrr, slow (direct printing is faster) - return concat(result,"") +-- new experimental reorganized serialize + +local function verbose_element(e,handlers) + local handle = handlers.handle + local serialize = handlers.serialize + local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn + local ats = eat and next(eat) and { } + if ats then + for k,v in next, eat do + ats[#ats+1] = format('%s=%q',k,v) end end - return "" -end - ---[[ldx-- -

The next function operates on the content only and needs a handle function -that accepts a string.

---ldx]]-- - -function xml.string(e,handle) - if not handle or (e.special and e.tg ~= "@rt@") then - -- nothing - elseif e.tg then - local edt = e.dt - if edt then + if ern and trace_remap and ern ~= ens then + ens = ern + end + if ens ~= "" then + if edt and #edt > 0 then + if ats then + handle("<",ens,":",etg," ",concat(ats," "),">") + else + handle("<",ens,":",etg,">") + end for i=1,#edt do - xml.string(edt[i],handle) + local e = edt[i] + if type(e) == "string" then + handle(e) + else + serialize(e,handlers) + end + end + handle("") + else + if ats then + handle("<",ens,":",etg," ",concat(ats," "),"/>") + else + handle("<",ens,":",etg,"/>") end end else - handle(e) + if edt and #edt > 0 then + if ats then + handle("<",etg," ",concat(ats," "),">") + else + handle("<",etg,">") + end + for i=1,#edt do + local ei = edt[i] + if type(ei) == "string" then + handle(ei) + else + serialize(ei,handlers) + end + end + handle("") + else + if ats then + handle("<",etg," ",concat(ats," "),"/>") + else + handle("<",etg,"/>") + end + end end end ---[[ldx-- -

How you deal with saving data depends on your preferences. For a 40 MB database -file the timings on a 2.3 Core Duo are as follows (time in seconds):

- - -1.3 : load data from file to string -6.1 : convert string into tree -5.3 : saving in file using xmlsave -6.8 : converting to string using xml.tostring -3.6 : saving converted string in file - +local function verbose_pi(e,handlers) + handlers.handle("") +end -

The save function is given below.

---ldx]]-- +local function verbose_comment(e,handlers) + handlers.handle("") +end -function xml.save(root,name) - local f = io.open(name,"w") - if f then - xml.serialize(root,function(s) f:write(s) end) - f:close() - end +local function verbose_cdata(e,handlers) + handlers.handle("") end ---[[ldx-- -

A few helpers:

---ldx]]-- +local function verbose_doctype(e,handlers) + handlers.handle("") +end -function xml.body(root) - return (root.ri and root.dt[root.ri]) or root +local function verbose_root(e,handlers) + handlers.serialize(e.dt,handlers) end -function xml.text(root) - return (root and xml.tostring(root)) or "" +local function verbose_text(e,handlers) + handlers.handle(e) end -function xml.content(root) -- bugged - return (root and root.dt and xml.tostring(root.dt)) or "" +local function verbose_document(e,handlers) + local serialize = handlers.serialize + local functions = handlers.functions + for i=1,#e do + local ei = e[i] + if type(ei) == "string" then + functions["@tx@"](ei,handlers) + else + serialize(ei,handlers) + end + end end -function xml.isempty(root, pattern) - if pattern == "" or pattern == "*" then - pattern = nil +local function serialize(e,handlers,...) + local initialize = handlers.initialize + local finalize = handlers.finalize + local functions = handlers.functions + if initialize then + local state = initialize(...) + if not state == true then + return state + end end - if pattern then - -- todo - return false + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) else - return not root or not root.dt or #root.dt == 0 or root.dt == "" + functions["@dc@"](e,handlers) + end + if finalize then + return finalize() end end ---[[ldx-- -

The next helper erases an element but keeps the table as it is, -and since empty strings are not serialized (effectively) it does -not harm. Copying the table would take more time. Usage:

- - -dt[k] = xml.empty() or xml.empty(dt,k) - ---ldx]]-- - -function xml.empty(dt,k) - if dt and k then - dt[k] = "" - return dt[k] +local function xserialize(e,handlers) + local functions = handlers.functions + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) else - return "" + functions["@dc@"](e,handlers) end end ---[[ldx-- -

The next helper assigns a tree (or string). Usage:

+local handlers = { } + +local function newhandlers(settings) + local t = table.copy(handlers.verbose or { }) -- merge + if settings then + for k,v in next, settings do + if type(v) == "table" then + tk = t[k] if not tk then tk = { } t[k] = tk end + for kk,vv in next, v do + tk[kk] = vv + end + else + t[k] = v + end + end + if settings.name then + handlers[settings.name] = t + end + end + return t +end + +local nofunction = function() end + +function xml.sethandlersfunction(handler,name,fnc) + handler.functions[name] = fnc or nofunction +end + +function xml.gethandlersfunction(handler,name) + return handler.functions[name] +end + +function xml.gethandlers(name) + return handlers[name] +end + +newhandlers { + name = "verbose", + initialize = false, -- faster than nil and mt lookup + finalize = false, -- faster than nil and mt lookup + serialize = xserialize, + handle = print, + functions = { + ["@dc@"] = verbose_document, + ["@dt@"] = verbose_doctype, + ["@rt@"] = verbose_root, + ["@el@"] = verbose_element, + ["@pi@"] = verbose_pi, + ["@cm@"] = verbose_comment, + ["@cd@"] = verbose_cdata, + ["@tx@"] = verbose_text, + } +} + +--[[ldx-- +

How you deal with saving data depends on your preferences. For a 40 MB database +file the timings on a 2.3 Core Duo are as follows (time in seconds):

+ + +1.3 : load data from file to string +6.1 : convert string into tree +5.3 : saving in file using xmlsave +6.8 : converting to string using xml.tostring +3.6 : saving converted string in file + + +

Beware, these were timings with the old routine, but measurements will not be that +much different I guess.
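The two common entry points built on top of these handlers, as a sketch (the filename is arbitrary):

local root = xml.convert("<a>text</a>")
xml.save(root,"copy.xml")      -- goes through the 'file' handler defined below
local str = xml.tostring(root) -- goes through the 'string' handler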

+--ldx]]-- + +-- maybe this will move to lxml-xml + +local result + +local xmlfilehandler = newhandlers { + name = "file", + initialize = function(name) result = io.open(name,"wb") return result end, + finalize = function() result:close() return true end, + handle = function(...) result:write(...) end, +} + +-- no checking on writeability here but not faster either +-- +-- local xmlfilehandler = newhandlers { +-- initialize = function(name) io.output(name,"wb") return true end, +-- finalize = function() io.close() return true end, +-- handle = io.write, +-- } + + +function xml.save(root,name) + serialize(root,xmlfilehandler,name) +end + +local result + +local xmlstringhandler = newhandlers { + name = "string", + initialize = function() result = { } return result end, + finalize = function() return concat(result) end, + handle = function(...) result[#result+1] = concat { ... } end +} + +local function xmltostring(root) -- 25% overhead due to collecting + if root then + if type(root) == 'string' then + return root + else -- if next(root) then -- next is faster than type (and >0 test) + return serialize(root,xmlstringhandler) or "" + end + end + return "" +end + +local function xmltext(root) -- inline + return (root and xmltostring(root)) or "" +end + +function initialize_mt(root) + mt = { __tostring = xmltext, __index = root } +end + +xml.defaulthandlers = handlers +xml.newhandlers = newhandlers +xml.serialize = serialize +xml.tostring = xmltostring + +--[[ldx-- +

The next function operates on the content only and needs a handle function +that accepts a string.
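A minimal sketch of such a handle function; it just collects the flattened text (the input is made up):

local t = { }
xml.string(xml.convert("<a>foo<b>bar</b></a>"), function(s) t[#t+1] = s end)
print(table.concat(t)) -- foobar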

+--ldx]]-- + +local function xmlstring(e,handle) + if not handle or (e.special and e.tg ~= "@rt@") then + -- nothing + elseif e.tg then + local edt = e.dt + if edt then + for i=1,#edt do + xmlstring(edt[i],handle) + end + end + else + handle(e) + end +end + +xml.string = xmlstring + +--[[ldx-- +

A few helpers:
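A sketch of what these helpers return on a freshly converted tree (the input is made up):

local root = xml.convert("<x:a><b/></x:a>")
local body = xml.body(root)     -- the 'a' element, skipping the '@rt@' wrapper
print(xml.name(body))           -- x:a
print(xml.root(body) == root)   -- true, walks __p__ upwards
print(xml.parent(body) == root) -- true, one level up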

+--ldx]]-- + +--~ xmlsetproperty(root,"settings",settings) + +function xml.settings(e) + while e do + local s = e.settings + if s then + return s + else + e = e.__p__ + end + end + return nil +end + +function xml.root(e) + local r = e + while e do + e = e.__p__ + if e then + r = e + end + end + return r +end + +function xml.parent(root) + return root.__p__ +end + +function xml.body(root) + return (root.ri and root.dt[root.ri]) or root -- not ok yet +end + +function xml.name(root) + if not root then + return "" + elseif root.ns == "" then + return root.tg + else + return root.ns .. ":" .. root.tg + end +end + +--[[ldx-- +

The next helper erases an element but keeps the table as it is, +and since empty strings are not serialized (effectively) it does +not harm. Copying the table would take more time. Usage:
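Called with an index only that slot is emptied; without an index the whole content is meant to be wiped. A sketch (the input is made up):

local e = xml.body(xml.convert("<a><b>one</b><c>two</c></a>"))
xml.erase(e.dt,2) -- the second child becomes an empty string but the slot stays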

+--ldx]]-- + +function xml.erase(dt,k) + if dt then + if k then + dt[k] = "" + else for k=1,#dt do + dt[1] = { "" } + end end + end +end + +--[[ldx-- +

The next helper assigns a tree (or string). Usage:

dt[k] = xml.assign(root) or xml.assign(dt,k,root) @@ -3635,6 +5031,42 @@ function xml.assign(dt,k,root) end end +-- the following helpers may move + +--[[ldx-- +

The next helper assigns a tree (or string). Usage:

+ +xml.tocdata(e) +xml.tocdata(e,"error") + +--ldx]]-- + +function xml.tocdata(e,wrapper) + local whatever = xmltostring(e.dt) + if wrapper then + whatever = format("<%s>%s",wrapper,whatever,wrapper) + end + local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e } + setmetatable(t,getmetatable(e)) + e.dt = { t } +end + +function xml.makestandalone(root) + if root.ri then + local dt = root.dt + for k=1,#dt do + local v = dt[k] + if type(v) == "table" and v.special and v.tg == "@pi@" then + local txt = v.dt[1] + if find(txt,"xml.*version=") then + v.dt[1] = txt .. " standalone='yes'" + break + end + end + end + end +end + end -- of closure @@ -3648,1420 +5080,1285 @@ if not modules then modules = { } end modules ['lxml-pth'] = { license = "see context related readme files" } +-- e.ni is only valid after a filter run + local concat, remove, insert = table.concat, table.remove, table.insert local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, lower, gmatch, gsub, find = string.format, string.lower, string.gmatch, string.gsub, string.find +local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep +local lpegmatch = lpeg.match + +-- beware, this is not xpath ... e.g. position is different (currently) and +-- we have reverse-sibling as reversed preceding sibling --[[ldx--

This module can be used stand alone but also inside MkIV in which case it hooks into the tracker code. Therefore we provide a few functions that set the tracers. Here we overload a previously defined function.

+

If I can get in the mood I will make a variant that is XSLT compliant +but I wonder if it makes sense.

--ldx]]-- -local trace_lpath = false - -if trackers then - trackers.register("xml.lpath", function(v) trace_lpath = v end) -end +--[[ldx-- +

Especially the lpath code is experimental, we will support some of xpath, but +only things that make sense for us; as compensation it is possible to hook in your +own functions. Apart from preprocessing content for ConTeXt we also need +this module for process management, like handling ctx and +rlx files.

-local settrace = xml.settrace -- lxml-tab + +a/b/c /*/c +a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) +a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) + +--ldx]]-- -function xml.settrace(str,value) - if str == "lpath" then - trace_lpath = value or false - else - settrace(str,value) -- lxml-tab - end -end +local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end +local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end +local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end --[[ldx-- -

We've now arrived at an intersting part: accessing the tree using a subset +

We've now arrived at an interesting part: accessing the tree using a subset of xpath and since we're not compatible we call it lpath. We will explain more about its usage in other documents.

--ldx]]-- -local lpathcalls = 0 -- statistics -local lpathcached = 0 -- statistics +local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end +local lpathcached = 0 function xml.lpathcached() return lpathcached end -xml.functions = xml.functions or { } -xml.expressions = xml.expressions or { } +xml.functions = xml.functions or { } -- internal +xml.expressions = xml.expressions or { } -- in expressions +xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection) +xml.specialhandler = xml.specialhandler or { } local functions = xml.functions local expressions = xml.expressions +local finalizers = xml.finalizers -local actions = { - [10] = "stay", - [11] = "parent", - [12] = "subtree root", - [13] = "document root", - [14] = "any", - [15] = "many", - [16] = "initial", - [20] = "match", - [21] = "match one of", - [22] = "match and attribute eq", - [23] = "match and attribute ne", - [24] = "match one of and attribute eq", - [25] = "match one of and attribute ne", - [27] = "has attribute", - [28] = "has value", - [29] = "fast match", - [30] = "select", - [31] = "expression", - [40] = "processing instruction", -} - --- a rather dumb lpeg +finalizers.xml = finalizers.xml or { } +finalizers.tex = finalizers.tex or { } -local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc +local function fallback (t, name) + local fn = finalizers[name] + if fn then + t[name] = fn + else + logs.report("xml","unknown sub finalizer '%s'",tostring(name)) + fn = function() end + end + return fn +end --- instead of using functions we just parse a few names which saves a call --- later on +setmetatable(finalizers.xml, { __index = fallback }) +setmetatable(finalizers.tex, { __index = fallback }) -local lp_position = P("position()") / "ps" -local lp_index = P("index()") / "id" -local lp_text = P("text()") / "tx" -local lp_name = P("name()") / "(ns~='' and ns..':'..tg)" -- "((rt.ns~='' and rt.ns..':'..rt.tg) or '')" -local lp_tag = P("tag()") / "tg" -- (rt.tg or '') -local lp_ns = P("ns()") / "ns" -- (rt.ns or '') -local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") -local lp_doequal = P("=") / "==" -local lp_attribute = P("@") / "" * Cc("(at['") * R("az","AZ","--","__")^1 * Cc("'] or '')") +xml.defaultprotocol = "xml" -local lp_lua_function = C(R("az","AZ","--","__")^1 * (P(".") * R("az","AZ","--","__")^1)^1) * P("(") / function(t) -- todo: better . handling - return t .. "(" -end +-- as xsl does not follow xpath completely here we will also +-- be more liberal especially with regards to the use of | and +-- the rootpath: +-- +-- test : all 'test' under current +-- /test : 'test' relative to current +-- a|b|c : set of names +-- (a|b|c) : idem +-- ! : not +-- +-- after all, we're not doing transformations but filtering. in +-- addition we provide filter functions (last bit) +-- +-- todo: optimizer +-- +-- .. : parent +-- * : all kids +-- / : anchor here +-- // : /**/ +-- ** : all in between +-- +-- so far we had (more practical as we don't transform) +-- +-- {/test} : kids 'test' under current node +-- {test} : any kid with tag 'test' +-- {//test} : same as above -local lp_function = C(R("az","AZ","--","__")^1) * P("(") / function(t) -- todo: better . handling - if expressions[t] then - return "expressions." .. t .. 
"(" - else - return "expressions.error(" - end -end +-- evaluator (needs to be redone, for the moment copied) -local lparent = lpeg.P("(") -local rparent = lpeg.P(")") -local noparent = 1 - (lparent+rparent) -local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent} -local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"} +-- todo: apply_axis(list,notable) and collection vs single --- if we use a dedicated namespace then we don't need to pass rt and k +local apply_axis = { } -local lp_special = (C(P("name")+P("text")+P("tag"))) * value / function(t,s) - if expressions[t] then - if s then - return "expressions." .. t .. "(r,k," .. s ..")" - else - return "expressions." .. t .. "(r,k)" +apply_axis['root'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local rt = ll + while ll do + ll = ll.__p__ + if ll then + rt = ll + end end - else - return "expressions.error(" .. t .. ")" + collected[#collected+1] = rt end + return collected end -local converter = lpeg.Cs ( ( - lp_position + - lp_index + - lp_text + lp_name + -- fast one - lp_special + - lp_noequal + lp_doequal + - lp_attribute + - lp_lua_function + - lp_function + -1 )^1 ) - --- expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1 +apply_axis['self'] = function(list) +--~ local collected = { } +--~ for l=1,#list do +--~ collected[#collected+1] = list[l] +--~ end +--~ return collected + return list +end -local template = [[ - return function(expressions,r,d,k,e,dt,ns,tg,id,ps) - local at, tx = e.at or { }, dt[1] or "" - return %s +apply_axis['child'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local dt = ll.dt + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + end + end + ll.en = en end -]] - -local function make_expression(str) - str = converter:match(str) - return str, loadstring(format(template,str))() -end - -local map = { } - -local space = S(' \r\n\t') -local squote = S("'") -local dquote = S('"') -local lparent = P('(') -local rparent = P(')') -local atsign = P('@') -local lbracket = P('[') -local rbracket = P(']') -local exclam = P('!') -local period = P('.') -local eq = P('==') + P('=') -local ne = P('<>') + P('!=') -local star = P('*') -local slash = P('/') -local colon = P(':') -local bar = P('|') -local hat = P('^') -local valid = R('az', 'AZ', '09') + S('_-') -local name_yes = C(valid^1 + star) * colon * C(valid^1 + star) -- permits ns:* *:tg *:* -local name_nop = Cc("*") * C(valid^1) -local name = name_yes + name_nop -local number = C((S('+-')^0 * R('09')^1)) / tonumber -local names = (bar^0 * name)^1 -local morenames = name * (bar^0 * name)^1 -local instructiontag = P('pi::') -local spacing = C(space^0) -local somespace = space^1 -local optionalspace = space^0 -local text = C(valid^0) -local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -local empty = 1-slash - -local is_eq = lbracket * atsign * name * eq * value * rbracket -local is_ne = lbracket * atsign * name * ne * value * rbracket -local is_attribute = lbracket * atsign * name * rbracket -local is_value = lbracket * value * rbracket -local is_number = lbracket * number * rbracket - -local nobracket = 1-(lbracket+rbracket) -- must be improved -local is_expression = lbracket * C(((C(nobracket^1))/make_expression)) * rbracket - -local is_expression = lbracket * 
(C(nobracket^1))/make_expression * rbracket - -local is_one = name -local is_none = exclam * name -local is_one_of = ((lparent * names * rparent) + morenames) -local is_none_of = exclam * ((lparent * names * rparent) + morenames) - -local stay = (period ) -local parent = (period * period ) / function( ) map[#map+1] = { 11 } end -local subtreeroot = (slash + hat ) / function( ) map[#map+1] = { 12 } end -local documentroot = (hat * hat ) / function( ) map[#map+1] = { 13 } end -local any = (star ) / function( ) map[#map+1] = { 14 } end -local many = (star * star ) / function( ) map[#map+1] = { 15 } end -local initial = (hat * hat * hat ) / function( ) map[#map+1] = { 16 } end - -local match = (is_one ) / function(...) map[#map+1] = { 20, true , ... } end -local match_one_of = (is_one_of ) / function(...) map[#map+1] = { 21, true , ... } end -local dont_match = (is_none ) / function(...) map[#map+1] = { 20, false, ... } end -local dont_match_one_of = (is_none_of ) / function(...) map[#map+1] = { 21, false, ... } end - -local match_and_eq = (is_one * is_eq ) / function(...) map[#map+1] = { 22, true , ... } end -local match_and_ne = (is_one * is_ne ) / function(...) map[#map+1] = { 23, true , ... } end -local dont_match_and_eq = (is_none * is_eq ) / function(...) map[#map+1] = { 22, false, ... } end -local dont_match_and_ne = (is_none * is_ne ) / function(...) map[#map+1] = { 23, false, ... } end - -local match_one_of_and_eq = (is_one_of * is_eq ) / function(...) map[#map+1] = { 24, true , ... } end -local match_one_of_and_ne = (is_one_of * is_ne ) / function(...) map[#map+1] = { 25, true , ... } end -local dont_match_one_of_and_eq = (is_none_of * is_eq ) / function(...) map[#map+1] = { 24, false, ... } end -local dont_match_one_of_and_ne = (is_none_of * is_ne ) / function(...) map[#map+1] = { 25, false, ... } end - -local has_attribute = (is_one * is_attribute) / function(...) map[#map+1] = { 27, true , ... } end -local has_value = (is_one * is_value ) / function(...) map[#map+1] = { 28, true , ... } end -local dont_has_attribute = (is_none * is_attribute) / function(...) map[#map+1] = { 27, false, ... } end -local dont_has_value = (is_none * is_value ) / function(...) map[#map+1] = { 28, false, ... } end -local position = (is_one * is_number ) / function(...) map[#map+1] = { 30, true, ... } end -local dont_position = (is_none * is_number ) / function(...) map[#map+1] = { 30, false, ... } end - -local expression = (is_one * is_expression)/ function(...) map[#map+1] = { 31, true, ... } end -local dont_expression = (is_none * is_expression)/ function(...) map[#map+1] = { 31, false, ... } end - -local self_expression = ( is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end - map[#map+1] = { 31, true, "*", "*", ... } end -local dont_self_expression = (exclam * is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end - map[#map+1] = { 31, false, "*", "*", ... } end - -local instruction = (instructiontag * text ) / function(...) map[#map+1] = { 40, ... } end -local nothing = (empty ) / function( ) map[#map+1] = { 15 } end -- 15 ? 
-local crap = (1-slash)^1 - --- a few ugly goodies: - -local docroottag = P('^^') / function( ) map[#map+1] = { 12 } end -local subroottag = P('^') / function( ) map[#map+1] = { 13 } end -local roottag = P('root::') / function( ) map[#map+1] = { 12 } end -local parenttag = P('parent::') / function( ) map[#map+1] = { 11 } end -local childtag = P('child::') -local selftag = P('self::') - --- there will be more and order will be optimized - -local selector = ( - instruction + --- many + any + -- brrr, not here ! - parent + stay + - dont_position + position + - dont_match_one_of_and_eq + dont_match_one_of_and_ne + - match_one_of_and_eq + match_one_of_and_ne + - dont_match_and_eq + dont_match_and_ne + - match_and_eq + match_and_ne + - dont_expression + expression + - dont_self_expression + self_expression + - has_attribute + has_value + - dont_match_one_of + match_one_of + - dont_match + match + - many + any + - crap + empty -) - -local grammar = P { "startup", - startup = (initial + documentroot + subtreeroot + roottag + docroottag + subroottag)^0 * V("followup"), - followup = ((slash + parenttag + childtag + selftag)^0 * selector)^1, -} + return collected +end -local function compose(str) - if not str or str == "" then - -- wildcard - return true - elseif str == '/' then - -- root - return false - else - map = { } - grammar:match(str) - if #map == 0 then - return true - else - local m = map[1][1] - if #map == 1 then - if m == 14 or m == 15 then - -- wildcard - return true - elseif m == 12 then - -- root - return false - end - elseif #map == 2 and m == 12 and map[2][1] == 20 then - -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } } - map[2][1] = 29 - return { map[2] } - end - if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then - insert(map, 1, { 16 }) +local function collect(list,collected) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + collect(dk,collected) end - -- print(gsub(table.serialize(map),"[ \n]+"," ")) - return map end + list.en = en end end +apply_axis['descendant'] = function(list) + local collected = { } + for l=1,#list do + collect(list[l],collected) + end + return collected +end -local cache = { } - -function xml.lpath(pattern,trace) - lpathcalls = lpathcalls + 1 - if type(pattern) == "string" then - local result = cache[pattern] - if result == nil then -- can be false which is valid -) - result = compose(pattern) - cache[pattern] = result - lpathcached = lpathcached + 1 +local function collect(list,collected) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + collect(dk,collected) + end end - if trace or trace_lpath then - xml.lshow(result) + list.en = en + end +end +apply_axis['descendant-or-self'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + if ll.special ~= true then -- catch double root + collected[#collected+1] = ll end - return result - else - return pattern + collect(ll,collected) end + return collected end -function xml.cached_patterns() - return cache +apply_axis['ancestor'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + while ll do + ll = ll.__p__ + if ll then + collected[#collected+1] = ll + end + end + end + return collected end --- we run out of locals (limited to 200) --- --- local 
fallbackreport = (texio and texio.write) or io.write - -function xml.lshow(pattern,report) --- report = report or fallbackreport - report = report or (texio and texio.write) or io.write - local lp = xml.lpath(pattern) - if lp == false then - report(" -: root\n") - elseif lp == true then - report(" -: wildcard\n") - else - if type(pattern) == "string" then - report(format("pattern: %s\n",pattern)) - end - for k=1,#lp do - local v = lp[k] - if #v > 1 then - local t = { } - for i=2,#v do - local vv = v[i] - if type(vv) == "string" then - t[#t+1] = (vv ~= "" and vv) or "#" - elseif type(vv) == "boolean" then - t[#t+1] = (vv and "==") or "<>" - end - end - report(format("%2i: %s %s -> %s\n", k,v[1],actions[v[1]],concat(t," "))) - else - report(format("%2i: %s %s\n", k,v[1],actions[v[1]])) +apply_axis['ancestor-or-self'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + collected[#collected+1] = ll + while ll do + ll = ll.__p__ + if ll then + collected[#collected+1] = ll end end end + return collected end -function xml.xshow(e,...) -- also handy when report is given, use () to isolate first e - local t = { ... } --- local report = (type(t[#t]) == "function" and t[#t]) or fallbackreport - local report = (type(t[#t]) == "function" and t[#t]) or (texio and texio.write) or io.write - if e == nil then - report("\n") - elseif type(e) ~= "table" then - report(tostring(e)) - elseif e.tg then - report(tostring(e) .. "\n") - else - for i=1,#e do - report(tostring(e[i]) .. "\n") +apply_axis['parent'] = function(list) + local collected = { } + for l=1,#list do + local pl = list[l].__p__ + if pl then + collected[#collected+1] = pl end end + return collected end ---[[ldx-- -

An lpath is converted to a table with instructions for traversing the -tree. However, simple cases are signaled by booleans. Because we don't know in -advance what we want to do with the found element the handle gets three arguments:

+apply_axis['attribute'] = function(list) + return { } +end - -r : the root element of the data table -d : the data table of the result -t : the index in the data table of the result - +apply_axis['namespace'] = function(list) + return { } +end -

Access to the root and data table makes it possible to construct insert and delete -functions.
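A small sketch of that interface; in the rewritten code xml.traverse is kept as a thin compatibility wrapper (it reports that xml.selection is now preferred) and still calls the handler with the parent element, its data table and the index, so the matched element sits at d[k]:

xml.traverse(root,"a/b",function(r,d,k)
    print(d[k].tg)   -- prints "b" for every match
end)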

---ldx]]-- - -local functions = xml.functions -local expressions = xml.expressions - -expressions.contains = string.find -expressions.find = string.find -expressions.upper = string.upper -expressions.lower = string.lower -expressions.number = tonumber -expressions.boolean = toboolean - -expressions.oneof = function(s,...) -- slow - local t = {...} for i=1,#t do if s == t[i] then return true end end return false -end - -expressions.error = function(str) - xml.error_handler("unknown function in lpath expression",str or "?") - return false -end - -functions.text = function(root,k,n) -- unchecked, maybe one deeper - local t = type(t) - if t == "string" then - return t - else -- todo n - local rdt = root.dt - return (rdt and rdt[k]) or root[k] or "" - end +apply_axis['following'] = function(list) -- incomplete +--~ local collected = { } +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni+1,#d do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ collected[#collected+1] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } +end + +apply_axis['preceding'] = function(list) -- incomplete +--~ local collected = { } +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni-1,1,-1 do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ collected[#collected+1] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } end -functions.name = function(d,k,n) -- ns + tg - local found = false - n = n or 0 - if not k then - -- not found - elseif n == 0 then - local dk = d[k] - found = dk and (type(dk) == "table") and dk - elseif n < 0 then - for i=k-1,1,-1 do - local di = d[i] - if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end - end - end - else - for i=k+1,#d,1 do +apply_axis['following-sibling'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni+1,#d do local di = d[i] if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end + collected[#collected+1] = di end end end - if found then - local ns, tg = found.rn or found.ns or "", found.tg - if ns ~= "" then - return ns .. ":" .. 
tg - else - return tg - end - else - return "" - end + return collected end -functions.tag = function(d,k,n) -- only tg - local found = false - n = n or 0 - if not k then - -- not found - elseif n == 0 then - local dk = d[k] - found = dk and (type(dk) == "table") and dk - elseif n < 0 then - for i=k-1,1,-1 do +apply_axis['preceding-sibling'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=1,ll.ni-1 do local di = d[i] if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end + collected[#collected+1] = di end end - else - for i=k+1,#d,1 do + end + return collected +end + +apply_axis['reverse-sibling'] = function(list) -- reverse preceding + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni-1,1,-1 do local di = d[i] if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end + collected[#collected+1] = di end end end - return (found and found.tg) or "" + return collected end -expressions.text = functions.text -expressions.name = functions.name -expressions.tag = functions.tag +apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self'] +apply_axis['auto-descendant'] = apply_axis['descendant'] +apply_axis['auto-child'] = apply_axis['child'] +apply_axis['auto-self'] = apply_axis['self'] +apply_axis['initial-child'] = apply_axis['child'] -local function traverse(root,pattern,handle,reverse,index,parent,wildcard) -- multiple only for tags, not for namespaces - if not root then -- error - return false - elseif pattern == false then -- root - handle(root,root.dt,root.ri) - return false - elseif pattern == true then -- wildcard - local rootdt = root.dt - if rootdt then - local start, stop, step = 1, #rootdt, 1 - if reverse then - start, stop, step = stop, start, -1 - end - for k=start,stop,step do - if handle(root,rootdt,root.ri or k) then return false end - if not traverse(rootdt[k],true,handle,reverse) then return false end - end - end - return false - elseif root.dt then - index = index or 1 - local action = pattern[index] - local command = action[1] - if command == 29 then -- fast case /oeps - local rootdt = root.dt - for k=1,#rootdt do - local e = rootdt[k] - local tg = e.tg - if e.tg then - local ns = e.rn or e.ns - local ns_a, tg_a = action[3], action[4] - local matched = (ns_a == "*" or ns == ns_a) and (tg_a == "*" or tg == tg_a) - if not action[2] then matched = not matched end - if matched then - if handle(root,rootdt,k) then return false end - end - end - end - elseif command == 11 then -- parent - local ep = root.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false +local function apply_nodes(list,directive,nodes) + -- todo: nodes[1] etc ... negated node name in set ... when needed + -- ... currently ignored + local maxn = #nodes + if maxn == 3 then --optimized loop + local nns, ntg = nodes[2], nodes[3] + if not nns and not ntg then -- wildcard + if directive then + return list + else + return { } end else - if (command == 16 or command == 12) and index == 1 then -- initial - -- wildcard = true - wildcard = command == 16 -- ok? 
- index = index + 1 - action = pattern[index] - command = action and action[1] or 0 -- something is wrong - end - if command == 11 then -- parent - local ep = root.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false - end - else - local rootdt = root.dt - local start, stop, step, n, dn = 1, #rootdt, 1, 0, 1 - if command == 30 then - if action[5] < 0 then - start, stop, step = stop, start, -1 - dn = -1 - end - elseif reverse and index == #pattern then - start, stop, step = stop, start, -1 - end - local idx = 0 - local hsh = { } -- this will slooow down the lot - for k=start,stop,step do -- we used to have functions for all but a case is faster - local e = rootdt[k] - local ns, tg = e.rn or e.ns, e.tg - if tg then - -- we can optimize this for simple searches, but it probably does not pay off - hsh[tg] = (hsh[tg] or 0) + 1 - idx = idx + 1 - if command == 30 then - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false + local collected, m, p = { }, 0, nil + if not nns then -- only check tag + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + if directive then + if ntg == ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - if not action[2] then matched = not matched end - if matched then - n = n + dn - if n == action[5] then - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - break - end - elseif wildcard then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end - end - else - local matched, multiple = false, false - if command == 20 then -- match - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - elseif command == 21 then -- match one of - multiple = true - for i=3,#action,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - elseif command == 22 then -- eq - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - matched = matched and e.at[action[6]] == action[7] - elseif command == 23 then -- ne - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = mached and e.at[action[6]] ~= action[7] - elseif command == 24 then -- one of eq - multiple = true - for i=3,#action-2,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[#action-1]] == action[#action] - elseif 
command == 25 then -- one of ne - multiple = true - for i=3,#action-2,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[#action-1]] ~= action[#action] - elseif command == 27 then -- has attribute - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[5]] - elseif command == 28 then -- has value - local edt, ns_a, tg_a = e.dt, action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = matched and edt and edt[1] == action[5] - elseif command == 31 then - local edt, ns_a, tg_a = e.dt, action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - if matched then - matched = action[6](expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1) - end - end - if matched then -- combine tg test and at test - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - if wildcard then - if multiple then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end - else - -- maybe or multiple; anyhow, check on (section|title) vs just section and title in example in lxml - if not traverse(e,pattern,handle,reverse,index,root) then return false end - end - end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - elseif command == 14 then -- any - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - elseif command == 15 then -- many - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root,true) then return false end - end - -- not here : 11 - elseif command == 11 then -- parent - local ep = e.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,root,index+1) then return false end - elseif handle(root,rootdt,k) then - return false - end - elseif command == 40 and e.special and tg == "@pi@" then -- pi - local pi = action[2] - if pi ~= "" then - local pt = e.dt[1] - if pt and pt:find(pi) then - if handle(root,rootdt,k) then - return false - end - end - elseif handle(root,rootdt,k) then - return false - end - elseif wildcard then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end + elseif ntg ~= ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + end + end + elseif not ntg then -- only check namespace + for l=1,#list do + local ll = list[l] + local lns = ll.rn or ll.ns + if lns then + if directive then + if lns == nns then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end + elseif lns ~= nns then + 
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - else - -- not here : 11 - if command == 11 then -- parent - local ep = e.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false + end + end + else -- check both + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = ltg == ntg and lns == nns + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - break -- else loop + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end end end end + return collected + end + else + local collected, m, p = { }, 0, nil + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = false + for n=1,maxn,3 do + local nns, ntg = nodes[n+1], nodes[n+2] + ok = (not ntg or ltg == ntg) and (not nns or lns == nns) + if ok then + break + end + end + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + end end + return collected end - return true end -xml.traverse = traverse +local quit_expression = false ---[[ldx-- -

Next come all kinds of locators and manipulators. The most generic function here -is xml.filter(root,pattern). All registered functions in the filters namespace -can be part of a search path, as in:
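(The first call below shows the pre-rewrite filters interface documented in this block; the second is a hedged sketch of the rewritten interface, where such a filter becomes a bracketed expression in the pattern and xml.selection, defined further on in this patch, collects the matches.)

local r, d, k = xml.filter(root,"/a/b/c/position(4)")
local hits    = xml.selection(root,"a/b/c[position()>3]")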

+local function apply_expression(list,expression,order) + local collected = { } + quit_expression = false + for l=1,#list do + local ll = list[l] + if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1 + collected[#collected+1] = ll + end + if quit_expression then + break + end + end + return collected +end + +local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb + +local spaces = S(" \n\r\t\f")^0 +local lp_space = S(" \n\r\t\f") +local lp_any = P(1) +local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") +local lp_doequal = P("=") / "==" +local lp_or = P("|") / " or " +local lp_and = P("&") / " and " + +local lp_builtin = P ( + P("firstindex") / "1" + + P("lastindex") / "(#ll.__p__.dt or 1)" + + P("firstelement") / "1" + + P("lastelement") / "(ll.__p__.en or 1)" + + P("first") / "1" + + P("last") / "#list" + + P("rootposition") / "order" + + P("position") / "l" + -- is element in finalizer + P("order") / "order" + + P("element") / "(ll.ei or 1)" + + P("index") / "(ll.ni or 1)" + + P("match") / "(ll.mi or 1)" + + P("text") / "(ll.dt[1] or '')" + + -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" + + P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" + + P("tag") / "ll.tg" + + P("ns") / "ll.ns" + ) * ((spaces * P("(") * spaces * P(")"))/"") + +local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])") +local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end) +local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end) +local lp_fastpos = lp_fastpos_n + lp_fastpos_p +local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false") + +local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling + return t .. "(" +end - -local r, d, k = xml.filter(root,"/a/b/c/position(4)" - ---ldx]]-- +local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling + if expressions[t] then + return "expr." .. t .. 
"(" + else + return "expr.error(" + end +end -local traverse, lpath, convert = xml.traverse, xml.lpath, xml.convert +local lparent = lpeg.P("(") +local rparent = lpeg.P(")") +local noparent = 1 - (lparent+rparent) +local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent} +local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"} -xml.filters = { } +local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')") +local lp_number = S("+-") * R("09")^1 +local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'") +local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"')) -function xml.filters.default(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end) - return dt and dt[dk], rt, dt, dk -end +local cleaner -function xml.filters.attributes(root,pattern,arguments) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end) - local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at) - if ekat then - if arguments then - return ekat[arguments] or "", rt, dt, dk +local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s) + if expressions[t] then + s = s and s ~= "" and lpegmatch(cleaner,s) + if s and s ~= "" then + return "expr." .. t .. "(ll," .. s ..")" else - return ekat, rt, dt, dk + return "expr." .. t .. "(ll)" end else - return { }, rt, dt, dk + return "expr.error(" .. t .. ")" end end -function xml.filters.reverse(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse') - return dt and dt[dk], rt, dt, dk -end +local content = + lp_builtin + + lp_attribute + + lp_special + + lp_noequal + lp_doequal + + lp_or + lp_and + + lp_reserved + + lp_lua_function + lp_function + + lp_content + -- too fragile + lp_child + + lp_any + +local converter = Cs ( + lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0 +) -function xml.filters.count(root,pattern,everything) - local n = 0 - traverse(root, lpath(pattern), function(r,d,t) - if everything or type(d[t]) == "table" then - n = n + 1 - end - end) - return n -end +cleaner = Cs ( ( +--~ lp_fastpos + + lp_reserved + + lp_number + + lp_string + +1 )^1 ) -function xml.filters.elements(root, pattern) -- == all - local t = { } - traverse(root, lpath(pattern), function(r,d,k) - local e = d[k] - if e then - t[#t+1] = e - end - end) - return t -end -function xml.filters.texts(root, pattern) - local t = { } - traverse(root, lpath(pattern), function(r,d,k) - local e = d[k] - if e and e.dt then - t[#t+1] = e.dt - end - end) - return t -end +--~ expr -function xml.filters.first(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end) - return dt and dt[dk], rt, dt, dk -end +local template_e = [[ + local expr = xml.expressions + return function(list,ll,l,order) + return %s + end +]] -function xml.filters.last(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse') - return dt and dt[dk], rt, dt, dk -end +local template_f_y = [[ + local finalizer = xml.finalizers['%s']['%s'] + return function(collection) + return finalizer(collection,%s) + end +]] -function xml.filters.index(root,pattern,arguments) - local rt, dt, dk, reverse, i = nil, nil, nil, false, tonumber(arguments or '1') or 1 - if i and i ~= 0 then 
- if i < 0 then - reverse, i = true, -i - end - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk, i = r, d, k, i-1 return i == 0 end, reverse) - if i == 0 then - return dt and dt[dk], rt, dt, dk - end +local template_f_n = [[ + return xml.finalizers['%s']['%s'] +]] + +-- + +local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] } +local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] } +local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] } +local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] } +local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] } +local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] } +local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] } +local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] } +local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] } +local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] } +local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] } +local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] } +local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] } +local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] } +local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] } + +local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] } +local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] } +local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] } +local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] } + +local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] } + +local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } } + +local skip = { } + +local function errorrunner_e(str,cnv) + if not skip[str] then + logs.report("lpath","error in expression: %s => %s",str,cnv) + skip[str] = cnv or str end - return nil, nil, nil, nil + return false +end +local function errorrunner_f(str,arg) + logs.report("lpath","error in finalizer: %s(%s)",str,arg or "") + return false end -function xml.filters.attribute(root,pattern,arguments) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end) - local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at) - return (ekat and (ekat[arguments] or ekat[gsub(arguments,"^([\"\'])(.*)%1$","%2")])) or "" +local function register_nodes(nodetest,nodes) + return { kind = "nodes", nodetest = nodetest, nodes = nodes } end -function xml.filters.text(root,pattern,arguments) -- ?? 
why index, tostring slow - local dtk, rt, dt, dk = xml.filters.index(root,pattern,arguments) - if dtk then -- n - local dtkdt = dtk.dt - if not dtkdt then - return "", rt, dt, dk - elseif #dtkdt == 1 and type(dtkdt[1]) == "string" then - return dtkdt[1], rt, dt, dk - else - return xml.tostring(dtkdt), rt, dt, dk - end +local function register_expression(expression) + local converted = lpegmatch(converter,expression) + local runner = loadstring(format(template_e,converted)) + runner = (runner and runner()) or function() errorrunner_e(expression,converted) end + return { kind = "expression", expression = expression, converted = converted, evaluator = runner } +end + +local function register_finalizer(protocol,name,arguments) + local runner + if arguments and arguments ~= "" then + runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments)) else - return "", rt, dt, dk + runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name)) end + runner = (runner and runner()) or function() errorrunner_f(name,arguments) end + return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner } end -function xml.filters.tag(root,pattern,n) - local tag = "" - traverse(root, lpath(pattern), function(r,d,k) - tag = xml.functions.tag(d,k,n and tonumber(n)) - return true - end) - return tag -end +local expression = P { "ex", + ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]", + sq = "'" * (1 - S("'"))^0 * "'", + dq = '"' * (1 - S('"'))^0 * '"', +} -function xml.filters.name(root,pattern,n) - local tag = "" - traverse(root, lpath(pattern), function(r,d,k) - tag = xml.functions.name(d,k,n and tonumber(n)) - return true - end) - return tag +local arguments = P { "ar", + ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")", + nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end, + sq = P("'") * (1 - P("'"))^0 * P("'"), + dq = P('"') * (1 - P('"'))^0 * P('"'), +} + +-- todo: better arg parser + +local function register_error(str) + return { kind = "error", error = format("unparsed: %s",str) } end ---[[ldx-- -

For splitting the filter function from the path specification, we can -use string matching or lpeg matching. Here the difference in speed is -negligible but the lpeg variant is more robust.
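An illustrative, much simplified lpeg split of a "path/name(arguments)" string, in the spirit of (but not identical to) the parser used here:

local P, C, S  = lpeg.P, lpeg.C, lpeg.S
local name     = (1-S("/("))^1
local stop     = name * P("(") * (1-P(")"))^0 * P(")") * P(-1)
local splitter = C((1-stop)^0) * C(name) * P("(") * C((1-P(")"))^0) * P(")")

print(lpeg.match(splitter,"a/b/c/position(4)"))  -- a/b/c/   position   4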

---ldx]]-- +-- there is a difference in * and /*/ and so we need to catch a few special cases + +local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed +local special_2 = P("/") * Cc(register_auto_self) +local special_3 = P("") * Cc(register_auto_self) + +local parser = Ct { "patterns", -- can be made a bit faster by moving pattern outside + + patterns = spaces * V("protocol") * spaces * ( + ( V("special") * spaces * P(-1) ) + + ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 ) + ), + + protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"), + + -- the / is needed for // as descendant or self is somewhat special + -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + + axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") + + V("descendant_or_self") + V("following_sibling") + V("following") + + V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") + + #(1-P(-1)) * Cc(register_auto_child), + + special = special_1 + special_2 + special_3, --- not faster but hipper ... although ... i can't get rid of the trailing / in the path + initial = (P("/") * spaces * Cc(register_initial_child))^-1, -local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc + error = (P(1)^1) / register_error, -local slash = P('/') -local name = (R("az","AZ","--","__"))^1 -local path = C(((1-slash)^0 * slash)^1) -local argument = P { "(" * C(((1 - S("()")) + V(1))^0) * ")" } -local action = Cc(1) * path * C(name) * argument -local attribute = Cc(2) * path * P('@') * C(name) -local direct = Cc(3) * Cc("../*") * slash^0 * C(name) * argument + shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"), -local parser = direct + action + attribute + shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0, -local filters = xml.filters -local attribute_filter = xml.filters.attributes -local default_filter = xml.filters.default + s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus + -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self), + s_descendant = P("**") * Cc(register_descendant), + s_child = P("*") * #(1-P(":")) * Cc(register_child ), +-- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ), + s_parent = P("..") * Cc(register_parent ), + s_self = P("." 
) * Cc(register_self ), + s_root = P("^^") * Cc(register_root ), + s_ancestor = P("^") * Cc(register_ancestor ), --- todo: also hash, could be gc'd + descendant = P("descendant::") * Cc(register_descendant ), + child = P("child::") * Cc(register_child ), + parent = P("parent::") * Cc(register_parent ), + self = P("self::") * Cc(register_self ), + root = P('root::') * Cc(register_root ), + ancestor = P('ancestor::') * Cc(register_ancestor ), + descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ), + ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ), + -- attribute = P('attribute::') * Cc(register_attribute ), + -- namespace = P('namespace::') * Cc(register_namespace ), + following = P('following::') * Cc(register_following ), + following_sibling = P('following-sibling::') * Cc(register_following_sibling ), + preceding = P('preceding::') * Cc(register_preceding ), + preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ), + reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ), -function xml.filter(root,pattern) - local kind, a, b, c = parser:match(pattern) - if kind == 1 or kind == 3 then - return (filters[b] or default_filter)(root,a,c) - elseif kind == 2 then - return attribute_filter(root,a,b) + nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes, + + expressions = expression / register_expression, + + letters = R("az")^1, + name = (1-lpeg.S("/[]()|:*!"))^1, + negate = P("!") * Cc(false), + + nodefunction = V("negate") + P("not") * Cc(false) + Cc(true), + nodetest = V("negate") + Cc(true), + nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))), + wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")), + nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces, + + finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer, + +} + +local cache = { } + +local function nodesettostring(set,nodetest) + local t = { } + for i=1,#set,3 do + local directive, ns, tg = set[i], set[i+1], set[i+2] + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) + t[#t+1] = (directive and tg) or format("not(%s)",tg) + end + if nodetest == false then + return format("not(%s)",concat(t,"|")) else - return default_filter(root,pattern) + return concat(t,"|") end end ---~ slightly faster, but first we need a proper test file ---~ ---~ local hash = { } ---~ ---~ function xml.filter(root,pattern) ---~ local h = hash[pattern] ---~ if not h then ---~ local kind, a, b, c = parser:match(pattern) ---~ if kind == 1 then ---~ h = { kind, filters[b] or default_filter, a, b, c } ---~ elseif kind == 2 then ---~ h = { kind, attribute_filter, a, b, c } ---~ else ---~ h = { kind, default_filter, a, b, c } ---~ end ---~ hash[pattern] = h ---~ end ---~ local kind = h[1] ---~ if kind == 1 then ---~ return h[2](root,h[2],h[4]) ---~ elseif kind == 2 then ---~ return h[2](root,h[2],h[3]) ---~ else ---~ return h[2](root,pattern) ---~ end ---~ end +local function tagstostring(list) + if #list == 0 then + return "no elements" + else + local t = { } + for i=1, #list do + local li = list[i] + local ns, tg = li.ns, li.tg + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + t[#t+1] = (tg == "@rt@" and 
"[root]") or format("%s:%s",ns,tg) + end + return concat(t," ") + end +end ---[[ldx-- -

The following functions collect elements and texts.
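A short usage sketch of the (pre-rewrite) collectors documented here; the tag names are made up:

local elements = xml.collect_elements(root,"section/title")
local texts    = xml.collect_texts   (root,"section/title",true)  -- flattened to strings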

---ldx]]-- +xml.nodesettostring = nodesettostring --- still somewhat bugged +local parse_pattern -- we have a harmless kind of circular reference -function xml.collect_elements(root, pattern, ignorespaces) - local rr, dd = { }, { } - traverse(root, lpath(pattern), function(r,d,k) - local dk = d and d[k] - if dk then - if ignorespaces and type(dk) == "string" and dk:find("[^%S]") then - -- ignore - else - local n = #rr+1 - rr[n], dd[n] = r, dk - end - end - end) - return dd, rr +local function lshow(parsed) + if type(parsed) == "string" then + parsed = parse_pattern(parsed) + end + local s = table.serialize_functions -- ugly + table.serialize_functions = false -- ugly + logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false)) + table.serialize_functions = s -- ugly end -function xml.collect_texts(root, pattern, flatten) - local t = { } -- no r collector - traverse(root, lpath(pattern), function(r,d,k) - if d then - local ek = d[k] - local tx = ek and ek.dt - if flatten then - if tx then - t[#t+1] = xml.tostring(tx) or "" +xml.lshow = lshow + +local function add_comment(p,str) + local pc = p.comment + if not pc then + p.comment = { str } + else + pc[#pc+1] = str + end +end + +parse_pattern = function (pattern) -- the gain of caching is rather minimal + lpathcalls = lpathcalls + 1 + if type(pattern) == "table" then + return pattern + else + local parsed = cache[pattern] + if parsed then + lpathcached = lpathcached + 1 + else + parsed = lpegmatch(parser,pattern) + if parsed then + parsed.pattern = pattern + local np = #parsed + if np == 0 then + parsed = { pattern = pattern, register_self, state = "parsing error" } + logs.report("lpath","parsing error in '%s'",pattern) + lshow(parsed) else - t[#t+1] = "" + -- we could have done this with a more complex parser but this + -- is cleaner + local pi = parsed[1] + if pi.axis == "auto-child" then + if false then + add_comment(parsed, "auto-child replaced by auto-descendant-or-self") + parsed[1] = register_auto_descendant_or_self + else + add_comment(parsed, "auto-child replaced by auto-descendant") + parsed[1] = register_auto_descendant + end + elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then + add_comment(parsed, "initial-child removed") -- we could also make it a auto-self + remove(parsed,1) + end + local np = #parsed -- can have changed + if np > 1 then + local pnp = parsed[np] + if pnp.kind == "nodes" and pnp.nodetest == true then + local nodes = pnp.nodes + if nodes[1] == true and nodes[2] == false and nodes[3] == false then + add_comment(parsed, "redundant final wildcard filter removed") + remove(parsed,np) + end + end + end end else - t[#t+1] = tx or "" + parsed = { pattern = pattern } + end + cache[pattern] = parsed + if trace_lparse and not trace_lprofile then + lshow(parsed) end - else - t[#t+1] = "" end - end) - return t + return parsed + end end -function xml.collect_tags(root, pattern, nonamespace) - local t = { } - xml.traverse(root, xml.lpath(pattern), function(r,d,k) - local dk = d and d[k] - if dk and type(dk) == "table" then - local ns, tg = e.ns, e.tg - if nonamespace then - t[#t+1] = tg -- if needed we can return an extra table - elseif ns == "" then - t[#t+1] = tg - else - t[#t+1] = ns .. ":" .. 
tg +-- we can move all calls inline and then merge the trace back +-- technically we can combine axis and the next nodes which is +-- what we did before but this a bit cleaner (but slower too) +-- but interesting is that it's not that much faster when we +-- go inline +-- +-- beware: we need to return a collection even when we filter +-- else the (simple) cache gets messed up + +-- caching found lookups saves not that much (max .1 sec on a 8 sec run) +-- and it also messes up finalizers + +-- watch out: when there is a finalizer, it's always called as there +-- can be cases that a finalizer returns (or does) something in case +-- there is no match; an example of this is count() + +local profiled = { } xml.profiled = profiled + +local function profiled_apply(list,parsed,nofparsed,order) + local p = profiled[parsed.pattern] + if p then + p.tested = p.tested + 1 + else + p = { tested = 1, matched = 0, finalized = 0 } + profiled[parsed.pattern] = p + end + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + p.matched = p.matched + 1 + p.finalized = p.finalized + 1 + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + p.finalized = p.finalized + 1 + return collected end + return nil end - end) - return #t > 0 and {} + end + if collected then + p.matched = p.matched + 1 + end + return collected +end + +local function traced_apply(list,parsed,nofparsed,order) + if trace_lparse then + lshow(parsed) + end + logs.report("lpath", "collecting : %s",parsed.pattern) + logs.report("lpath", " root tags : %s",tagstostring(list)) + logs.report("lpath", " order : %s",order or "unset") + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest)) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "") + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "") + return collected + end + return nil + end + end + return collected end ---[[ldx-- -

Often using an iterator looks nicer in the code than passing handler -functions. The book describes how to use coroutines for that -purpose. This permits -code like:

- - -for r, d, k in xml.elements(xml.load('text.xml'),"title") do - print(d[k]) +local function normal_apply(list,parsed,nofparsed,order) + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + local axis = pi.axis + if axis ~= "self" then + collected = apply_axis[axis](collected) + end + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + return pi.finalizer(collected) + end + if not collected or #collected == 0 then + local pf = i < nofparsed and parsed[nofparsed].finalizer + if pf then + return pf(collected) -- can be anything + end + return nil + end + end + return collected end - -

This will print all the titles in the document. The iterator variant takes -1.5 times the runtime of the function variant, which is due to the overhead in -creating the wrapper. So, instead of:

- - -function xml.filters.first(root,pattern) - for rt,dt,dk in xml.elements(root,pattern) - return dt and dt[dk], rt, dt, dk +local function parse_apply(list,pattern) + -- we avoid an extra call + local parsed = cache[pattern] + if parsed then + lpathcalls = lpathcalls + 1 + lpathcached = lpathcached + 1 + elseif type(pattern) == "table" then + lpathcalls = lpathcalls + 1 + parsed = pattern + else + parsed = parse_pattern(pattern) or pattern + end + if not parsed then + return + end + local nofparsed = #parsed + if nofparsed == 0 then + return -- something is wrong + end + local one = list[1] + if not one then + return -- something is wrong + elseif not trace_lpath then + return normal_apply(list,parsed,nofparsed,one.mi) + elseif trace_lprofile then + return profiled_apply(list,parsed,nofparsed,one.mi) + else + return traced_apply(list,parsed,nofparsed,one.mi) end - return nil, nil, nil, nil end - - -

We use the function variants in the filters.
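The underlying trick in isolation, as a tiny self-contained sketch (walk and elements are illustrative stand-ins, not library functions): a traversal that expects a handler is turned into an iterator by passing coroutine.yield as that handler.

local wrap, yield = coroutine.wrap, coroutine.yield

local function walk(t,handle)        -- stands in for the tree traversal
    for i=1,#t do handle(t[i]) end
end

local function elements(t)           -- the wrapper that costs the extra time
    return wrap(function() walk(t,yield) end)
end

for v in elements { "a", "b", "c" } do
    print(v)                         -- a, b, c
end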

---ldx]]-- -local wrap, yield = coroutine.wrap, coroutine.yield +-- internal (parsed) -function xml.elements(root,pattern,reverse) - return wrap(function() traverse(root, lpath(pattern), yield, reverse) end) +expressions.child = function(e,pattern) + return parse_apply({ e },pattern) -- todo: cache end - -function xml.elements_only(root,pattern,reverse) - return wrap(function() traverse(root, lpath(pattern), function(r,d,k) yield(d[k]) end, reverse) end) +expressions.count = function(e,pattern) + local collected = parse_apply({ e },pattern) -- todo: cache + return (collected and #collected) or 0 end -function xml.each_element(root, pattern, handle, reverse) - local ok - traverse(root, lpath(pattern), function(r,d,k) ok = true handle(r,d,k) end, reverse) - return ok +-- external + +expressions.oneof = function(s,...) -- slow + local t = {...} for i=1,#t do if s == t[i] then return true end end return false +end +expressions.error = function(str) + xml.error_handler("unknown function in lpath expression",tostring(str or "?")) + return false +end +expressions.undefined = function(s) + return s == nil end -function xml.process_elements(root, pattern, handle) - traverse(root, lpath(pattern), function(r,d,k) - local dkdt = d[k].dt - if dkdt then - for i=1,#dkdt do - local v = dkdt[i] - if v.tg then handle(v) end - end - end - end) +expressions.quit = function(s) + if s or s == nil then + quit_expression = true + end + return true end -function xml.process_attributes(root, pattern, handle) - traverse(root, lpath(pattern), function(r,d,k) - local ek = d[k] - local a = ek.at or { } - handle(a) - if next(a) then -- next is faster than type (and >0 test) - ek.at = a - else - ek.at = nil - end - end) +expressions.print = function(...) + print(...) + return true end ---[[ldx-- -

We've now arrived at the functions that manipulate the tree.
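A brief usage sketch of the (pre-rewrite) manipulators that follow; patterns and content are made up:

xml.inject_element(root,"a","<b>extra</b>")  -- append a converted snippet to each match
xml.delete_element(root,"a/b")               -- remove matches, returning the deleted ones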

---ldx]]-- +expressions.contains = find +expressions.find = find +expressions.upper = upper +expressions.lower = lower +expressions.number = tonumber +expressions.boolean = toboolean -function xml.inject_element(root, pattern, element, prepend) - if root and element then - local matches, collect = { }, nil - if type(element) == "string" then - element = convert(element,true) - end - if element then - collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end - traverse(root, lpath(pattern), collect) - for i=1,#matches do - local m = matches[i] - local r, d, k, element, edt = m[1], m[2], m[3], m[4], nil - if element.ri then - element = element.dt[element.ri].dt - else - element = element.dt - end - if r.ri then - edt = r.dt[r.ri].dt - else - edt = d and d[k] and d[k].dt - end - if edt then - local be, af - if prepend then - be, af = xml.copy(element), edt - else - be, af = edt, xml.copy(element) - end - for i=1,#af do - be[#be+1] = af[i] - end - if r.ri then - r.dt[r.ri].dt = be - else - d[k].dt = be - end - else - -- r.dt = element.dt -- todo - end - end +-- user interface + +local function traverse(root,pattern,handle) + logs.report("xml","use 'xml.selection' instead for '%s'",pattern) + local collected = parse_apply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + handle(r,r.dt,e.ni) end end end --- todo: copy ! - -function xml.insert_element(root, pattern, element, before) -- todo: element als functie - if root and element then - if pattern == "/" then - xml.inject_element(root, pattern, element, before) - else - local matches, collect = { }, nil - if type(element) == "string" then - element = convert(element,true) - end - if element and element.ri then - element = element.dt[element.ri] - end - if element then - collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end - traverse(root, lpath(pattern), collect) - for i=#matches,1,-1 do - local m = matches[i] - local r, d, k, element = m[1], m[2], m[3], m[4] - if not before then k = k + 1 end - if element.tg then - insert(d,k,element) -- untested ---~ elseif element.dt then ---~ for _,v in ipairs(element.dt) do -- i added ---~ insert(d,k,v) ---~ k = k + 1 ---~ end ---~ end - else - local edt = element.dt - if edt then - for i=1,#edt do - insert(d,k,edt[i]) - k = k + 1 - end - end - end - end +local function selection(root,pattern,handle) + local collected = parse_apply({ root },pattern) + if collected then + if handle then + for c=1,#collected do + handle(collected[c]) end + else + return collected end end end -xml.insert_element_after = xml.insert_element -xml.insert_element_before = function(r,p,e) xml.insert_element(r,p,e,true) end -xml.inject_element_after = xml.inject_element -xml.inject_element_before = function(r,p,e) xml.inject_element(r,p,e,true) end +xml.parse_parser = parser +xml.parse_pattern = parse_pattern +xml.parse_apply = parse_apply +xml.traverse = traverse -- old method, r, d, k +xml.selection = selection -- new method, simple handle -function xml.delete_element(root, pattern) - local matches, deleted = { }, { } - local collect = function(r,d,k) matches[#matches+1] = { r, d, k } end - traverse(root, lpath(pattern), collect) - for i=#matches,1,-1 do - local m = matches[i] - deleted[#deleted+1] = remove(m[2],m[3]) - end - return deleted +local lpath = parse_pattern + +xml.lpath = lpath + +function xml.cached_patterns() + return cache end -function xml.replace_element(root, pattern, element) - if type(element) == "string" then - 
element = convert(element,true) - end - if element and element.ri then - element = element.dt[element.ri] - end - if element then - traverse(root, lpath(pattern), function(rm, d, k) - d[k] = element.dt -- maybe not clever enough - end) +-- generic function finalizer (independant namespace) + +local function dofunction(collected,fnc) + if collected then + local f = functions[fnc] + if f then + for c=1,#collected do + f(collected[c]) + end + else + logs.report("xml","unknown function '%s'",fnc) + end end end -local function load_data(name) -- == io.loaddata - local f, data = io.open(name), "" - if f then - data = f:read("*all",'b') -- 'b' ? - f:close() - end - return data +xml.finalizers.xml["function"] = dofunction +xml.finalizers.tex["function"] = dofunction + +-- functions + +expressions.text = function(e,n) + local rdt = e.__p__.dt + return (rdt and rdt[n]) or "" end -function xml.include(xmldata,pattern,attribute,recursive,loaddata) - -- parse="text" (default: xml), encoding="" (todo) - -- attribute = attribute or 'href' - pattern = pattern or 'include' - loaddata = loaddata or load_data - local function include(r,d,k) - local ek, name = d[k], nil - if not attribute or attribute == "" then - local ekdt = ek.dt - name = (type(ekdt) == "table" and ekdt[1]) or ekdt - end - if not name then - if ek.at then - for a in gmatch(attribute or "href","([^|]+)") do - name = ek.at[a] - if name then break end +expressions.name = function(e,n) -- ns + tg + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = type(e) == "table" and e + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break + else + n = n + 1 end end end - local data = (name and name ~= "" and loaddata(name)) or "" - if data == "" then - xml.empty(d,k) - elseif ek.at["parse"] == "text" then -- for the moment hard coded - d[k] = xml.escaped(data) - else - local xi = xml.convert(data) - if not xi then - xml.empty(d,k) - else - if recursive then - xml.include(xi,pattern,attribute,recursive,loaddata) + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 end - xml.assign(d,k,xi) end end end - xml.each_element(xmldata, pattern, include) + if found then + local ns, tg = found.rn or found.ns or "", found.tg + if ns ~= "" then + return ns .. ":" .. tg + else + return tg + end + else + return "" + end end -function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space ! 
- traverse(root, lpath(pattern), function(r,d,k) - local dkdt = d[k].dt - if dkdt then -- can be optimized - local t = { } - for i=1,#dkdt do - local str = dkdt[i] - if type(str) == "string" then - - if str == "" then - -- stripped +expressions.tag = function(e,n) -- only tg + if not e then + return "" + else + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = (type(e) == "table") and e -- seems to fail + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break else - if nolines then - str = gsub(str,"[ \n\r\t]+"," ") - end - if str == "" then - -- stripped - else - t[#t+1] = str - end + n = n + 1 end - else - t[#t+1] = str end end - d[k].dt = t - end - end) -end - -local function rename_space(root, oldspace, newspace) -- fast variant - local ndt = #root.dt - for i=1,ndt or 0 do - local e = root[i] - if type(e) == "table" then - if e.ns == oldspace then - e.ns = newspace - if e.rn then - e.rn = newspace + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 + end end end - local edt = e.dt - if edt then - rename_space(edt, oldspace, newspace) - end end + return (found and found.tg) or "" end end -xml.rename_space = rename_space +--[[ldx-- +

This is the main filter function. It returns whatever is asked for.

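A minimal usage sketch (not part of the original code; the sample document and the "a/b" pattern are invented for illustration, while xml.convert, xml.filter and xml.tostring are the functions defined in this file):

local root      = xml.convert("<a><b>one</b><b>two</b></a>")
local collected = xml.filter(root,"a/b")      -- the table of matched elements
for i=1,#collected do
    print(xml.tostring(collected[i]))         -- serializes each matched <b> element
end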
+--ldx]]-- -function xml.remap_tag(root, pattern, newtg) - traverse(root, lpath(pattern), function(r,d,k) - d[k].tg = newtg - end) -end -function xml.remap_namespace(root, pattern, newns) - traverse(root, lpath(pattern), function(r,d,k) - d[k].ns = newns - end) +function xml.filter(root,pattern) -- no longer funny attribute handling here + return parse_apply({ root },pattern) end -function xml.check_namespace(root, pattern, newns) - traverse(root, lpath(pattern), function(r,d,k) - local dk = d[k] - if (not dk.rn or dk.rn == "") and dk.ns == "" then - dk.rn = newns - end - end) + +--[[ldx-- +

Often using an iterator looks nicer in the code than passing handler +functions. The Lua book describes how to use coroutines for that +purpose. This permits +code like:

+ + +for r, d, k in xml.elements(xml.load('text.xml'),"title") do + print(d[k]) -- old method end -function xml.remap_name(root, pattern, newtg, newns, newrn) - traverse(root, lpath(pattern), function(r,d,k) - local dk = d[k] - dk.tg = newtg - dk.ns = newns - dk.rn = newrn - end) +for e in xml.collected(xml.load('text.xml'),"title") do + print(e) -- new one end + +--ldx]]-- -function xml.filters.found(root,pattern,check_content) - local found = false - traverse(root, lpath(pattern), function(r,d,k) - if check_content then - local dk = d and d[k] - found = dk and dk.dt and next(dk.dt) and true +local wrap, yield = coroutine.wrap, coroutine.yield + +function xml.elements(root,pattern,reverse) -- r, d, k + local collected = parse_apply({ root },pattern) + if collected then + if reverse then + return wrap(function() for c=#collected,1,-1 do + local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) + end end) else - found = true + return wrap(function() for c=1,#collected do + local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) + end end) end - return true - end) - return found + end + return wrap(function() end) end ---[[ldx-- -

Here are a few synonyms.

---ldx]]-- +function xml.collected(root,pattern,reverse) -- e + local collected = parse_apply({ root },pattern) + if collected then + if reverse then + return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end) + else + return wrap(function() for c=1,#collected do yield(collected[c]) end end) + end + end + return wrap(function() end) +end -xml.filters.position = xml.filters.index -xml.count = xml.filters.count -xml.index = xml.filters.index -xml.position = xml.filters.index -xml.first = xml.filters.first -xml.last = xml.filters.last -xml.found = xml.filters.found +end -- of closure -xml.each = xml.each_element -xml.process = xml.process_element -xml.strip = xml.strip_whitespace -xml.collect = xml.collect_elements -xml.all = xml.collect_elements +do -- create closure to overcome 200 locals limit -xml.insert = xml.insert_element_after -xml.inject = xml.inject_element_after -xml.after = xml.insert_element_after -xml.before = xml.insert_element_before -xml.delete = xml.delete_element -xml.replace = xml.replace_element +if not modules then modules = { } end modules ['lxml-mis'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local concat = table.concat +local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring +local format, gsub, match = string.format, string.gsub, string.match +local lpegmatch = lpeg.match --[[ldx-- -

The following helper functions best belong to the lmxl-ini +

The following helper functions best belong to the lxml-ini module. Some are here because we need them in the mk document and other manuals, others came up when playing with this module. Since this module is also used in mtxrun we've put them here instead of loading more modules there than needed.

--ldx]]-- -function xml.gsub(t,old,new) +local function xmlgsub(t,old,new) -- will be replaced local dt = t.dt if dt then for k=1,#dt do @@ -5069,28 +6366,26 @@ function xml.gsub(t,old,new) if type(v) == "string" then dt[k] = gsub(v,old,new) else - xml.gsub(v,old,new) + xmlgsub(v,old,new) end end end end +--~ xml.gsub = xmlgsub + function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual - if d and k and d[k-1] and type(d[k-1]) == "string" then - local s = d[k-1]:match("\n(%s+)") - xml.gsub(dk,"\n"..string.rep(" ",#s),"\n") + if d and k then + local dkm = d[k-1] + if dkm and type(dkm) == "string" then + local s = match(dkm,"\n(%s+)") + xmlgsub(dk,"\n"..rep(" ",#s),"\n") + end end end -function xml.serialize_path(root,lpath,handle) - local dk, r, d, k = xml.first(root,lpath) - dk = xml.copy(dk) - xml.strip_leading_spaces(dk,d,k) - xml.serialize(dk,handle) -end - --~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end +--~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end --~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end --~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end @@ -5114,8 +6409,6 @@ local escaped = Cs(normal * (special * normal)^0) -- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) --- unescaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) --- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0) local normal = (1 - S"&")^0 local special = P("<")/"<" + P(">")/">" + P("&")/"&" local unescaped = Cs(normal * (special * normal)^0) @@ -5124,84 +6417,32 @@ local unescaped = Cs(normal * (special * normal)^0) local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) -function xml.escaped (str) return escaped :match(str) end -function xml.unescaped(str) return unescaped:match(str) end -function xml.cleansed (str) return cleansed :match(str) end +xml.escaped_pattern = escaped +xml.unescaped_pattern = unescaped +xml.cleansed_pattern = cleansed -function xml.join(t,separator,lastseparator) - if #t > 0 then - local result = { } - for k,v in pairs(t) do - result[k] = xml.tostring(v) - end - if lastseparator then - return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. 
result[#result] - else - return concat(result,separator) +function xml.escaped (str) return lpegmatch(escaped,str) end +function xml.unescaped(str) return lpegmatch(unescaped,str) end +function xml.cleansed (str) return lpegmatch(cleansed,str) end + +-- this might move + +function xml.fillin(root,pattern,str,check) + local e = xml.first(root,pattern) + if e then + local n = #e.dt + if not check or n == 0 or (n == 1 and e.dt[1] == "") then + e.dt = { str } end - else - return "" end end -function xml.statistics() - return { - lpathcalls = lpathcalls, - lpathcached = lpathcached, - } -end - --- xml.set_text_cleanup(xml.show_text_entities) --- xml.set_text_cleanup(xml.resolve_text_entities) - ---~ xml.lshow("/../../../a/(b|c)[@d='e']/f") ---~ xml.lshow("/../../../a/!(b|c)[@d='e']/f") ---~ xml.lshow("/../../../a/!b[@d!='e']/f") - ---~ x = xml.convert([[ ---~ ---~ 01 ---~ 02 ---~ 03 ---~ OK ---~ 05 ---~ 06 ---~ ALSO OK ---~ ---~ ]]) - ---~ xml.settrace("lpath",true) - ---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == 'ok']")) ---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == upper('ok')]")) ---~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']")) ---~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]")) ---~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]")) - ---~ str = [[ ---~ ---~ ---~ my secret ---~ ---~ ]] - ---~ x = xml.convert([[ ---~ 0102xx03OK ---~ ]]) ---~ xml.xshow(xml.first(x,"b[tag(2) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(1) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(-1) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(-2) == 'x']")) - ---~ print(xml.filter(x,"b/tag(2)")) ---~ print(xml.filter(x,"b/tag(1)")) - end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['lxml-ent'] = { +if not modules then modules = { } end modules ['lxml-aux'] = { version = 1.001, comment = "this module is the basis for the lxml-* ones", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", @@ -5209,457 +6450,836 @@ if not modules then modules = { } end modules ['lxml-ent'] = { license = "see context related readme files" } -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub, find = string.format, string.gsub, string.find -local utfchar = unicode.utf8.char +-- not all functions here make sense anymore vbut we keep them for +-- compatibility reasons ---[[ldx-- -

We provide (at least here) two entity handlers. The more extensive -resolver consults a hash first, tries to convert to utf next, -and finally calls a handler when defined. When this all fails, the -original entity is returned.

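A small sketch of the (old) resolver described above (the entity and the test strings are invented; xml.entities, xml.entity_handler and xml.resolve_text_entities are the ones defined in this module):

xml.entities.tex = "TeX"                           -- user supplied entity
print(xml.resolve_text_entities("&tex; &#x41;"))   -- "TeX A": hash lookup, then hex to utf
print(xml.resolve_text_entities("&unknown;"))      -- "[unknown]": falls back on xml.entity_handler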
---ldx]]-- +local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end) -xml.entities = xml.entities or { } -- xml.entity_handler == function +local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name +local xmlinheritedconvert = xml.inheritedconvert -function xml.entity_handler(e) - return format("[%s]",e) -end +local type = type +local insert, remove = table.insert, table.remove +local gmatch, gsub = string.gmatch, string.gsub -local function toutf(s) - return utfchar(tonumber(s,16)) +local function report(what,pattern,c,e) + logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern) end -local function utfize(root) - local d = root.dt - for k=1,#d do - local dk = d[k] - if type(dk) == "string" then - -- test prevents copying if no match - if find(dk,"&#x.-;") then - d[k] = gsub(dk,"&#x(.-);",toutf) +local function withelements(e,handle,depth) + if e and handle then + local edt = e.dt + if edt then + depth = depth or 0 + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + handle(e,depth) + withelements(e,handle,depth+1) + end end - else - utfize(dk) end end end -xml.utfize = utfize +xml.withelements = withelements -local function resolve(e) -- hex encoded always first, just to avoid mkii fallbacks - if find(e,"^#x") then - return utfchar(tonumber(e:sub(3),16)) - elseif find(e,"^#") then - return utfchar(tonumber(e:sub(2))) - else - local ee = xml.entities[e] -- we cannot shortcut this one (is reloaded) - if ee then - return ee - else - local h = xml.entity_handler - return (h and h(e)) or "&" .. e .. ";" +function xml.withelement(e,n,handle) -- slow + if e and n ~= 0 and handle then + local edt = e.dt + if edt then + if n > 0 then + for i=1,#edt do + local ei = edt[i] + if type(ei) == "table" then + if n == 1 then + handle(ei) + return + else + n = n - 1 + end + end + end + elseif n < 0 then + for i=#edt,1,-1 do + local ei = edt[i] + if type(ei) == "table" then + if n == -1 then + handle(ei) + return + else + n = n + 1 + end + end + end + end end end end -local function resolve_entities(root) - if not root.special or root.tg == "@rt@" then - local d = root.dt - for k=1,#d do - local dk = d[k] - if type(dk) == "string" then - if find(dk,"&.-;") then - d[k] = gsub(dk,"&(.-);",resolve) - end - else - resolve_entities(dk) +xml.elements_only = xml.collected + +function xml.each_element(root,pattern,handle,reverse) + local collected = xmlparseapply({ root },pattern) + if collected then + if reverse then + for c=#collected,1,-1 do + handle(collected[c]) + end + else + for c=1,#collected do + handle(collected[c]) end end + return collected end end -xml.resolve_entities = resolve_entities +xml.process_elements = xml.each_element -function xml.utfize_text(str) - if find(str,"&#") then - return (gsub(str,"&#x(.-);",toutf)) - else - return str +function xml.process_attributes(root,pattern,handle) + local collected = xmlparseapply({ root },pattern) + if collected and handle then + for c=1,#collected do + handle(collected[c].at) + end end + return collected end -function xml.resolve_text_entities(str) -- maybe an lpeg. maybe resolve inline - if find(str,"&") then - return (gsub(str,"&(.-);",resolve)) - else - return str - end +--[[ldx-- +

The following functions collect elements and texts.

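A quick sketch of the collectors that follow (the sample tree and pattern are invented for illustration):

local root  = xml.convert("<a><b>one</b><b>two</b></a>")
local texts = xml.collect_texts(root,"a/b",true)   -- flatten: serialize the content of each match
print(table.concat(texts,", "))                    -- "one, two"
print(#xml.collect_elements(root,"a/b"))           -- 2, the matched <b> elements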
+--ldx]]-- + +-- are these still needed -> lxml-cmp.lua + +function xml.collect_elements(root, pattern) + return xmlparseapply({ root },pattern) end -function xml.show_text_entities(str) - if find(str,"&") then - return (gsub(str,"&(.-);","[%1]")) - else - return str +function xml.collect_texts(root, pattern, flatten) -- todo: variant with handle + local collected = xmlparseapply({ root },pattern) + if collected and flatten then + local xmltostring = xml.tostring + for c=1,#collected do + collected[c] = xmltostring(collected[c].dt) + end end + return collected or { } end --- experimental, this will be done differently - -function xml.merge_entities(root) - local documententities = root.entities - local allentities = xml.entities - if documententities then - for k, v in next, documententities do - allentities[k] = v +function xml.collect_tags(root, pattern, nonamespace) + local collected = xmlparseapply({ root },pattern) + if collected then + local t = { } + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace then + t[#t+1] = tg + elseif ns == "" then + t[#t+1] = tg + else + t[#t+1] = ns .. ":" .. tg + end end + return t end end +--[[ldx-- +

We've now arrived at the functions that manipulate the tree.

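A short sketch of typical calls to the manipulation functions that follow (the sample tree, the patterns and the replacement snippet are invented for illustration):

local root = xml.convert("<a><b>old</b><c/></a>")
xml.delete_element (root,"a/c")                 -- removes the matched <c> elements from their parent
xml.replace_element(root,"a/b","<b>new</b>")    -- a string argument is converted first
print(xml.tostring(root))                       -- now serializes with <b>new</b> instead of the old <b>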
+--ldx]]-- -end -- of closure +local no_root = { no_root = true } -do -- create closure to overcome 200 locals limit +function xml.redo_ni(d) + for k=1,#d do + local dk = d[k] + if type(dk) == "table" then + dk.ni = k + end + end +end -if not modules then modules = { } end modules ['lxml-mis'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} +local function xmltoelement(whatever,root) + if not whatever then + return nil + end + local element + if type(whatever) == "string" then + element = xmlinheritedconvert(whatever,root) + else + element = whatever -- we assume a table + end + if element.error then + return whatever -- string + end + if element then + --~ if element.ri then + --~ element = element.dt[element.ri].dt + --~ else + --~ element = element.dt + --~ end + end + return element +end -local concat = table.concat -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub = string.format, string.gsub +xml.toelement = xmltoelement ---[[ldx-- -

The following helper functions best belong to the lmxl-ini -module. Some are here because we need then in the mk -document and other manuals, others came up when playing with -this module. Since this module is also used in we've -put them here instead of loading mode modules there then needed.

---ldx]]-- +local function copiedelement(element,newparent) + if type(element) == "string" then + return element + else + element = xmlcopy(element).dt + if newparent and type(element) == "table" then + element.__p__ = newparent + end + return element + end +end -function xml.gsub(t,old,new) - local dt = t.dt - if dt then - for k=1,#dt do - local v = dt[k] - if type(v) == "string" then - dt[k] = gsub(v,old,new) - else - xml.gsub(v,old,new) +function xml.delete_element(root,pattern) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('deleting',pattern,c,e) + end + local d = p.dt + remove(d,e.ni) + xml.redo_ni(d) -- can be made faster and inlined end end end end -function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual - if d and k and d[k-1] and type(d[k-1]) == "string" then - local s = d[k-1]:match("\n(%s+)") - xml.gsub(dk,"\n"..string.rep(" ",#s),"\n") +function xml.replace_element(root,pattern,whatever) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('replacing',pattern,c,e) + end + local d = p.dt + d[e.ni] = copiedelement(element,p) + xml.redo_ni(d) -- probably not needed + end + end end end -function xml.serialize_path(root,lpath,handle) - local dk, r, d, k = xml.first(root,lpath) - dk = xml.copy(dk) - xml.strip_leading_spaces(dk,d,k) - xml.serialize(dk,handle) +local function inject_element(root,pattern,whatever,prepend) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + local d, k, rri = r.dt, e.ni, r.ri + local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt) + if edt then + local be, af + local cp = copiedelement(element,e) + if prepend then + be, af = cp, edt + else + be, af = edt, cp + end + for i=1,#af do + be[#be+1] = af[i] + end + if rri then + r.dt[rri].dt = be + else + d[k].dt = be + end + xml.redo_ni(d) + end + end + end end ---~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end +local function insert_element(root,pattern,whatever,before) -- todo: element als functie + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + local d, k = r.dt, e.ni + if not before then + k = k + 1 + end + insert(d,k,copiedelement(element,r)) + xml.redo_ni(d) + end + end +end ---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end ---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end ---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>" +xml.insert_element = insert_element +xml.insert_element_after = insert_element +xml.insert_element_before = function(r,p,e) insert_element(r,p,e,true) end +xml.inject_element = inject_element +xml.inject_element_after = inject_element +xml.inject_element_before = function(r,p,e) inject_element(r,p,e,true) end -local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs +local function 
include(xmldata,pattern,attribute,recursive,loaddata) + -- parse="text" (default: xml), encoding="" (todo) + -- attribute = attribute or 'href' + pattern = pattern or 'include' + loaddata = loaddata or io.loaddata + local collected = xmlparseapply({ xmldata },pattern) + if collected then + for c=1,#collected do + local ek = collected[c] + local name = nil + local ekdt = ek.dt + local ekat = ek.at + local epdt = ek.__p__.dt + if not attribute or attribute == "" then + name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- ckeck, probably always tab or str + end + if not name then + for a in gmatch(attribute or "href","([^|]+)") do + name = ekat[a] + if name then break end + end + end + local data = (name and name ~= "" and loaddata(name)) or "" + if data == "" then + epdt[ek.ni] = "" -- xml.empty(d,k) + elseif ekat["parse"] == "text" then + -- for the moment hard coded + epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data) + else +--~ local settings = xmldata.settings +--~ settings.parent_root = xmldata -- to be tested +--~ local xi = xmlconvert(data,settings) + local xi = xmlinheritedconvert(data,xmldata) + if not xi then + epdt[ek.ni] = "" -- xml.empty(d,k) + else + if recursive then + include(xi,pattern,attribute,recursive,loaddata) + end + epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi) + end + end + end + end +end --- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg --- --- 1021:0335:0287:0247 +xml.include = include --- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ" --- --- 1559:0257:0288:0190 (last one suggested by roberto) +--~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away +--~ local collected = xmlparseapply({ xmldata },pattern) +--~ if collected then +--~ local xmltostring = xml.tostring +--~ for c=1,#collected do +--~ local e = collected[c] +--~ local data = manipulator(xmltostring(e)) +--~ if data == "" then +--~ epdt[e.ni] = "" +--~ else +--~ local xi = xmlinheritedconvert(data,xmldata) +--~ if not xi then +--~ epdt[e.ni] = "" +--~ else +--~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi) +--~ end +--~ end +--~ end +--~ end +--~ end --- escaped = Cs((S("<&>") / xml.escapes + 1)^0) --- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) -local normal = (1 - S("<&>"))^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local escaped = Cs(normal * (special * normal)^0) +--~ xml.manipulate = manipulate --- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) +function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space ! 
+ local collected = xmlparseapply({ root },pattern) + if collected then + for i=1,#collected do + local e = collected[i] + local edt = e.dt + if edt then + local t = { } + for i=1,#edt do + local str = edt[i] + if type(str) == "string" then + if str == "" then + -- stripped + else + if nolines then + str = gsub(str,"[ \n\r\t]+"," ") + end + if str == "" then + -- stripped + else + t[#t+1] = str + end + end + else + --~ str.ni = i + t[#t+1] = str + end + end + e.dt = t + end + end + end +end --- unescaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) --- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0) -local normal = (1 - S"&")^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local unescaped = Cs(normal * (special * normal)^0) +function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing + local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now + if collected then + for i=1,#collected do + local e = collected[i] + local edt = e.dt + if edt then + if anywhere then + local t = { } + for e=1,#edt do + local str = edt[e] + if type(str) ~= "string" then + t[#t+1] = str + elseif str ~= "" then + -- todo: lpeg for each case + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s*(.-)%s*$","%1") + if str ~= "" then + t[#t+1] = str + end + end + end + e.dt = t + else + -- we can assume a regular sparse xml table with no successive strings + -- otherwise we should use a while loop + if #edt > 0 then + -- strip front + local str = edt[1] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt,1) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s+","") + if str == "" then + remove(edt,1) + else + edt[1] = str + end + end + end + if #edt > 1 then + -- strip end + local str = edt[#edt] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"%s+$","") + if str == "" then + remove(edt) + else + edt[#edt] = str + end + end + end + end + end + end + end +end --- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference) +local function rename_space(root, oldspace, newspace) -- fast variant + local ndt = #root.dt + for i=1,ndt or 0 do + local e = root[i] + if type(e) == "table" then + if e.ns == oldspace then + e.ns = newspace + if e.rn then + e.rn = newspace + end + end + local edt = e.dt + if edt then + rename_space(edt, oldspace, newspace) + end + end + end +end -local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) +xml.rename_space = rename_space -xml.escaped_pattern = escaped -xml.unescaped_pattern = unescaped -xml.cleansed_pattern = cleansed +function xml.remap_tag(root, pattern, newtg) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + collected[c].tg = newtg + end + end +end -function xml.escaped (str) return escaped :match(str) end -function xml.unescaped(str) return unescaped:match(str) end -function xml.cleansed (str) return cleansed :match(str) end +function xml.remap_namespace(root, pattern, newns) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + collected[c].ns = newns + end + end +end -function xml.join(t,separator,lastseparator) - if #t > 0 then - local result = { } - for k,v in pairs(t) do - result[k] = xml.tostring(v) +function 
xml.check_namespace(root, pattern, newns) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + if (not e.rn or e.rn == "") and e.ns == "" then + e.rn = newns + end end - if lastseparator then - return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. result[#result] - else - return concat(result,separator) + end +end + +function xml.remap_name(root, pattern, newtg, newns, newrn) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + e.tg, e.ns, e.rn = newtg, newns, newrn end - else - return "" end end +--[[ldx-- +

Here are a few synonyms.

+--ldx]]-- + +xml.each = xml.each_element +xml.process = xml.process_element +xml.strip = xml.strip_whitespace +xml.collect = xml.collect_elements +xml.all = xml.collect_elements + +xml.insert = xml.insert_element_after +xml.inject = xml.inject_element_after +xml.after = xml.insert_element_after +xml.before = xml.insert_element_before +xml.delete = xml.delete_element +xml.replace = xml.replace_element + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['trac-tra'] = { +if not modules then modules = { } end modules ['lxml-xml'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "this module is the basis for the lxml-* ones", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } --- the tag is kind of generic and used for functions that are not --- bound to a variable, like node.new, node.copy etc (contrary to for instance --- node.has_attribute which is bound to a has_attribute local variable in mkiv) +local finalizers = xml.finalizers.xml +local xmlfilter = xml.filter -- we could inline this one for speed +local xmltostring = xml.tostring +local xmlserialize = xml.serialize -debugger = debugger or { } +local function first(collected) -- wrong ? + return collected and collected[1] +end -local counters = { } -local names = { } -local getinfo = debug.getinfo -local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch +local function last(collected) + return collected and collected[#collected] +end --- one +local function all(collected) + return collected +end -local function hook() - local f = getinfo(2,"f").func - local n = getinfo(2,"Sn") --- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end - if f then - local cf = counters[f] - if cf == nil then - counters[f] = 1 - names[f] = n - else - counters[f] = cf + 1 +local function reverse(collected) + if collected then + local reversed = { } + for c=#collected,1,-1 do + reversed[#reversed+1] = collected[c] end + return reversed end end -local function getname(func) - local n = names[func] - if n then - if n.what == "C" then - return n.name or '' - else - -- source short_src linedefined what name namewhat nups func - local name = n.name or n.namewhat or n.what - if not name or name == "" then name = "?" 
end - return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name) - end - else - return "unknown" + +local function attribute(collected,name) + if collected and #collected > 0 then + local at = collected[1].at + return at and at[name] end end -function debugger.showstats(printer,threshold) - printer = printer or texio.write or print - threshold = threshold or 0 - local total, grandtotal, functions = 0, 0, 0 - printer("\n") -- ugly but ok - -- table.sort(counters) - for func, count in pairs(counters) do - if count > threshold then - local name = getname(func) - if not name:find("for generator") then - printer(format("%8i %s", count, name)) - total = total + count - end - end - grandtotal = grandtotal + count - functions = functions + 1 - end - printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) + +local function att(id,name) + local at = id.at + return at and at[name] end --- two +local function count(collected) + return (collected and #collected) or 0 +end ---~ local function hook() ---~ local n = getinfo(2) ---~ if n.what=="C" and not n.name then ---~ local f = tostring(debug.traceback()) ---~ local cf = counters[f] ---~ if cf == nil then ---~ counters[f] = 1 ---~ names[f] = n ---~ else ---~ counters[f] = cf + 1 ---~ end ---~ end ---~ end ---~ function debugger.showstats(printer,threshold) ---~ printer = printer or texio.write or print ---~ threshold = threshold or 0 ---~ local total, grandtotal, functions = 0, 0, 0 ---~ printer("\n") -- ugly but ok ---~ -- table.sort(counters) ---~ for func, count in pairs(counters) do ---~ if count > threshold then ---~ printer(format("%8i %s", count, func)) ---~ total = total + count ---~ end ---~ grandtotal = grandtotal + count ---~ functions = functions + 1 ---~ end ---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) ---~ end +local function position(collected,n) + if collected then + n = tonumber(n) or 0 + if n < 0 then + return collected[#collected + n + 1] + elseif n > 0 then + return collected[n] + else + return collected[1].mi or 0 + end + end +end --- rest +local function match(collected) + return (collected and collected[1].mi) or 0 -- match +end -function debugger.savestats(filename,threshold) - local f = io.open(filename,'w') - if f then - debugger.showstats(function(str) f:write(str) end,threshold) - f:close() +local function index(collected) + if collected then + return collected[1].ni end end -function debugger.enable() - debug.sethook(hook,"c") +local function attributes(collected,arguments) + if collected then + local at = collected[1].at + if arguments then + return at[arguments] + elseif next(at) then + return at -- all of them + end + end end -function debugger.disable() - debug.sethook() ---~ counters[debug.getinfo(2,"f").func] = nil +local function chainattribute(collected,arguments) -- todo: optional levels + if collected then + local e = collected[1] + while e do + local at = e.at + if at then + local a = at[arguments] + if a then + return a + end + else + break -- error + end + e = e.__p__ + end + end + return "" end -function debugger.tracing() - local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0 - if n > 0 then - function debugger.tracing() return true end ; return true +local function raw(collected) -- hybrid + if collected then + local e = collected[1] or collected + return (e and xmlserialize(e)) or "" -- only first 
as we cannot concat function else - function debugger.tracing() return false end ; return false + return "" end end ---~ debugger.enable() - ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) - ---~ debugger.disable() - ---~ print("") ---~ debugger.showstats() ---~ print("") ---~ debugger.showstats(print,3) - -trackers = trackers or { } +local function text(collected) -- hybrid + if collected then + local e = collected[1] or collected + return (e and xmltostring(e.dt)) or "" + else + return "" + end +end -local data, done = { }, { } +local function texts(collected) + if collected then + local t = { } + for c=1,#collected do + local e = collection[c] + if e and e.dt then + t[#t+1] = e.dt + end + end + return t + end +end -local function set(what,value) - if type(what) == "string" then - what = aux.settings_to_array(what) +local function tag(collected,n) + if collected then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[#collected-n+1] + end + return c and c.tg end - for i=1,#what do - local w = what[i] - for d, f in next, data do - if done[d] then - -- prevent recursion due to wildcards - elseif find(d,w) then - done[d] = true - for i=1,#f do - f[i](value) - end +end + +local function name(collected,n) + if collected then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[#collected-n+1] + end + if c then + if c.ns == "" then + return c.tg + else + return c.ns .. ":" .. c.tg end end end end -local function reset() - for d, f in next, data do - for i=1,#f do - f[i](false) +local function tags(collected,nonamespace) + if collected then + local t = { } + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace or ns == "" then + t[#t+1] = tg + else + t[#t+1] = ns .. ":" .. tg + end end + return t end end -function trackers.register(what,...) - what = lower(what) - local w = data[what] - if not w then - w = { } - data[what] = w - end - for _, fnc in next, { ... 
} do - local typ = type(fnc) - if typ == "function" then - w[#w+1] = fnc - elseif typ == "string" then - w[#w+1] = function(value) set(fnc,value,nesting) end +local function empty(collected) + if collected then + for c=1,#collected do + local e = collected[c] + if e then + local edt = e.dt + if edt then + local n = #edt + if n == 1 then + local edk = edt[1] + local typ = type(edk) + if typ == "table" then + return false + elseif edk ~= "" then -- maybe an extra tester for spacing only + return false + end + elseif n > 1 then + return false + end + end + end end end + return true end -function trackers.enable(what) - done = { } - set(what,true) +finalizers.first = first +finalizers.last = last +finalizers.all = all +finalizers.reverse = reverse +finalizers.elements = all +finalizers.default = all +finalizers.attribute = attribute +finalizers.att = att +finalizers.count = count +finalizers.position = position +finalizers.match = match +finalizers.index = index +finalizers.attributes = attributes +finalizers.chainattribute = chainattribute +finalizers.text = text +finalizers.texts = texts +finalizers.tag = tag +finalizers.name = name +finalizers.tags = tags +finalizers.empty = empty + +-- shortcuts -- we could support xmlfilter(id,pattern,first) + +function xml.first(id,pattern) + return first(xmlfilter(id,pattern)) end -function trackers.disable(what) - done = { } - if not what or what == "" then - trackers.reset(what) +function xml.last(id,pattern) + return last(xmlfilter(id,pattern)) +end + +function xml.count(id,pattern) + return count(xmlfilter(id,pattern)) +end + +function xml.attribute(id,pattern,a,default) + return attribute(xmlfilter(id,pattern),a,default) +end + +function xml.raw(id,pattern) + if pattern then + return raw(xmlfilter(id,pattern)) else - set(what,false) + return raw(id) end end -function trackers.reset(what) - done = { } - reset() +function xml.text(id,pattern) + if pattern then + -- return text(xmlfilter(id,pattern)) + local collected = xmlfilter(id,pattern) + return (collected and xmltostring(collected[1].dt)) or "" + elseif id then + -- return text(id) + return xmltostring(id.dt) or "" + else + return "" + end end -function trackers.list() -- pattern - local list = table.sortedkeys(data) - local user, system = { }, { } - for l=1,#list do - local what = list[l] - if find(what,"^%*") then - system[#system+1] = what - else - user[#user+1] = what - end - end - return user, system +xml.content = text + +function xml.position(id,pattern,n) -- element + return position(xmlfilter(id,pattern),n) +end + +function xml.match(id,pattern) -- number + return match(xmlfilter(id,pattern)) +end + +function xml.empty(id,pattern) + return empty(xmlfilter(id,pattern)) end +xml.all = xml.filter +xml.index = xml.position +xml.found = xml.filter + end -- of closure @@ -5667,7 +7287,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['luat-env'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -5679,10 +7299,10 @@ if not modules then modules = { } end modules ['luat-env'] = { -- evolved before bytecode arrays were available and so a lot of -- code has disappeared already. 
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local format = string.format +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquote, quote = string.unquote, string.quote -- precautions @@ -5716,13 +7336,14 @@ if not environment.jobname then environ function environment.initialize_arguments(arg) local arguments, files = { }, { } environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index, argument in pairs(arg) do + for index=1,#arg do + local argument = arg[index] if index > 0 then - local flag, value = argument:match("^%-+(.+)=(.-)$") + local flag, value = match(argument,"^%-+(.-)=(.-)$") if flag then - arguments[flag] = string.unquote(value or "") + arguments[flag] = unquote(value or "") else - flag = argument:match("^%-+(.+)") + flag = match(argument,"^%-+(.+)") if flag then arguments[flag] = true else @@ -5749,25 +7370,30 @@ function environment.argument(name,partial) return arguments[name] elseif partial then if not sortedflags then - sortedflags = { } - for _,v in pairs(table.sortedkeys(arguments)) do - sortedflags[#sortedflags+1] = "^" .. v + sortedflags = table.sortedkeys(arguments) + for k=1,#sortedflags do + sortedflags[k] = "^" .. sortedflags[k] end environment.sortedflags = sortedflags end -- example of potential clash: ^mode ^modefile - for _,v in ipairs(sortedflags) do - if name:find(v) then - return arguments[v:sub(2,#v)] + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] end end end return nil end +environment.argument("x",true) + function environment.split_arguments(separator) -- rather special, cut-off before separator local done, before, after = false, { }, { } - for _,v in ipairs(environment.original_arguments) do + local original_arguments = environment.original_arguments + for k=1,#original_arguments do + local v = original_arguments[k] if not done and v == separator then done = true elseif done then @@ -5784,16 +7410,17 @@ function environment.reconstruct_commandline(arg,noquote) if noquote and #arg == 1 then local a = arg[1] a = resolvers.resolve(a) - a = a:unquote() + a = unquote(a) return a - elseif next(arg) then + elseif #arg > 0 then local result = { } - for _,a in ipairs(arg) do -- ipairs 1 .. 
#n + for i=1,#arg do + local a = arg[i] a = resolvers.resolve(a) - a = a:unquote() - a = a:gsub('"','\\"') -- tricky - if a:find(" ") then - result[#result+1] = a:quote() + a = unquote(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quote(a) else result[#result+1] = a end @@ -5806,17 +7433,18 @@ end if arg then - -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later) + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) local newarg, instring = { }, false - for index, argument in ipairs(arg) do - if argument:find("^\"") then - newarg[#newarg+1] = argument:gsub("^\"","") - if not argument:find("\"$") then + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then instring = true end - elseif argument:find("\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","") + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") instring = false elseif instring then newarg[#newarg] = newarg[#newarg] .. " " .. argument @@ -5871,12 +7499,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in filename = file.replacesuffix(filename, "lua") local fullname = environment.luafile(filename) if fullname and fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading file %s", fullname) end return environment.loadedluacode(fullname) else - if trace_verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end return nil @@ -5896,7 +7524,7 @@ function environment.loadluafile(filename, version) -- when not overloaded by explicit suffix we look for a luc file first local fullname = (lucname and environment.luafile(lucname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check @@ -5914,7 +7542,7 @@ function environment.loadluafile(filename, version) if v == version then return true else - if trace_verbose then + if trace_locating then logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version) end environment.loadluafile(filename) @@ -5925,12 +7553,12 @@ function environment.loadluafile(filename, version) end fullname = (luaname and environment.luafile(luaname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check if not chunk then - if verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end else @@ -5948,7 +7576,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-inf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-inf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -5973,6 +7601,14 @@ function statistics.hastimer(instance) return instance and instance.starttime end +function statistics.resettiming(instance) + if not instance then + notimer = { timing = 0, loadtime = 0 } + else + instance.timing, instance.loadtime = 0, 0 + end +end + function statistics.starttiming(instance) if 
not instance then notimer = { } @@ -5987,6 +7623,8 @@ function statistics.starttiming(instance) if not instance.loadtime then instance.loadtime = 0 end + else +--~ logs.report("system","nested timing (%s)",tostring(instance)) end instance.timing = it + 1 end @@ -6032,6 +7670,12 @@ function statistics.elapsedindeed(instance) return t > statistics.threshold end +function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds + if statistics.elapsedindeed(instance) then + return format("%s seconds %s", statistics.elapsedtime(instance),rest or "") + end +end + -- general function function statistics.register(tag,fnc) @@ -6110,14 +7754,32 @@ function statistics.timed(action,report) report("total runtime: %s",statistics.elapsedtime(timer)) end +-- where, not really the best spot for this: + +commands = commands or { } + +local timer + +function commands.resettimer() + statistics.resettiming(timer) + statistics.starttiming(timer) +end + +function commands.elapsedtime() + statistics.stoptiming(timer) + tex.sprint(statistics.elapsedtime(timer)) +end + +commands.resettimer() + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['luat-log'] = { +if not modules then modules = { } end modules ['trac-log'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-log.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6125,7 +7787,11 @@ if not modules then modules = { } end modules ['luat-log'] = { -- this is old code that needs an overhaul -local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format +--~ io.stdout:setvbuf("no") +--~ io.stderr:setvbuf("no") + +local write_nl, write = texio.write_nl or print, texio.write or io.write +local format, gmatch = string.format, string.gmatch local texcount = tex and tex.count if texlua then @@ -6206,25 +7872,48 @@ function logs.tex.line(fmt,...) 
-- new end end +--~ function logs.tex.start_page_number() +--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +--~ if real > 0 then +--~ if user > 0 then +--~ if sub > 0 then +--~ write(format("[%s.%s.%s",real,user,sub)) +--~ else +--~ write(format("[%s.%s",real,user)) +--~ end +--~ else +--~ write(format("[%s",real)) +--~ end +--~ else +--~ write("[-") +--~ end +--~ end + +--~ function logs.tex.stop_page_number() +--~ write("]") +--~ end + +local real, user, sub + function logs.tex.start_page_number() - local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +end + +function logs.tex.stop_page_number() if real > 0 then if user > 0 then if sub > 0 then - write(format("[%s.%s.%s",real,user,sub)) + logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub) else - write(format("[%s.%s",real,user)) + logs.report("pages", "flushing realpage %s, userpage %s",real,user) end else - write(format("[%s",real)) + logs.report("pages", "flushing realpage %s",real) end else - write("[-") + logs.report("pages", "flushing page") end -end - -function logs.tex.stop_page_number() - write("]") + io.flush() end logs.tex.report_job_stat = statistics.show_job_stat @@ -6324,7 +8013,7 @@ end function logs.setprogram(_name_,_banner_,_verbose_) name, banner = _name_, _banner_ if _verbose_ then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") end logs.set_method("tex") logs.report = report -- also used in libraries @@ -6337,9 +8026,9 @@ end function logs.setverbose(what) if what then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") else - trackers.disable("resolvers.verbose") + trackers.disable("resolvers.locating") end logs.verbose = what or false end @@ -6356,7 +8045,7 @@ logs.report = logs.tex.report logs.simple = logs.tex.report function logs.reportlines(str) -- todo: - for line in str:gmatch("(.-)[\n\r]") do + for line in gmatch(str,"(.-)[\n\r]") do logs.report(line) end end @@ -6367,8 +8056,12 @@ end logs.simpleline = logs.reportline -function logs.help(message,option) +function logs.reportbanner() -- for scripts too logs.report(banner) +end + +function logs.help(message,option) + logs.reportbanner() logs.reportline() logs.reportlines(message) local moreinfo = logs.moreinfo or "" @@ -6400,6 +8093,11 @@ end --~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --~ end +function logs.fatal(where,...) 
+ logs.report(where,"fatal error: %s, aborting now",format(...)) + os.exit() +end + end -- of closure @@ -6407,10 +8105,10 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", - comment = "companion to luat-lib.tex", } -- After a few years using the code the large luat-inp.lua file @@ -6422,7 +8120,7 @@ if not modules then modules = { } end modules ['data-inp'] = { -- * some public auxiliary functions were made private -- -- TODO: os.getenv -> os.env[] --- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller) +-- TODO: instances.[hashes,cnffiles,configurations,522] -- TODO: check escaping in find etc, too much, too slow -- This lib is multi-purpose and can be loaded again later on so that @@ -6443,12 +8141,13 @@ if not modules then modules = { } end modules ['data-inp'] = { local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type = next, type +local lpegmatch = lpeg.match -local trace_locating, trace_detail, trace_verbose = false, false, false +local trace_locating, trace_detail, trace_expansions = false, false, false -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) -trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end) +trackers.register("resolvers.locating", function(v) trace_locating = v end) +trackers.register("resolvers.details", function(v) trace_detail = v end) +trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo if not resolvers then resolvers = { @@ -6472,7 +8171,7 @@ resolvers.generators.notfound = { nil } resolvers.cacheversion = '1.0.1' resolvers.cnfname = 'texmf.cnf' resolvers.luaname = 'texmfcnf.lua' -resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' +resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}' local dummy_path_expr = "^!*unset/*$" @@ -6514,8 +8213,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' } alternatives['map files'] = 'map' alternatives['enc files'] = 'enc' -alternatives['cid files'] = 'cid' -alternatives['fea files'] = 'fea' +alternatives['cid maps'] = 'cid' -- great, why no cid files +alternatives['font feature files'] = 'fea' -- and fea files here alternatives['opentype fonts'] = 'otf' alternatives['truetype fonts'] = 'ttf' alternatives['truetype collections'] = 'ttc' @@ -6531,6 +8230,11 @@ formats ['sfd'] = 'SFDFONTS' suffixes ['sfd'] = { 'sfd' } alternatives['subfont definition files'] = 'sfd' +-- lib paths + +formats ['lib'] = 'CLUAINPUTS' -- new (needs checking) +suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' } + -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. 
We used to pass the @@ -6653,8 +8357,10 @@ local function check_configuration() -- not yet ok, no time for debugging now -- bad luck end fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm - fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") - fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- this will go away some day + fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//") end @@ -6669,7 +8375,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' end end -resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE")) +resolvers.settrace(os.getenv("MTX_INPUT_TRACE")) function resolvers.osenv(key) local ie = instance.environment @@ -6757,37 +8463,43 @@ end -- work that well; the parsing is ok, but dealing with the resulting -- table is a pain because we need to work inside-out recursively +local function do_first(a,b) + local t = { } + for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end + return "{" .. concat(t,",") .. "}" +end + +local function do_second(a,b) + local t = { } + for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end + return "{" .. concat(t,",") .. "}" +end + +local function do_both(a,b) + local t = { } + for sa in gmatch(a,"[^,]+") do + for sb in gmatch(b,"[^,]+") do + t[#t+1] = sa .. sb + end + end + return "{" .. concat(t,",") .. "}" +end + +local function do_three(a,b,c) + return a .. b.. c +end + local function splitpathexpr(str, t, validate) -- no need for further optimization as it is only called a - -- few times, we can use lpeg for the sub; we could move - -- the local functions outside the body + -- few times, we can use lpeg for the sub + if trace_expansions then + logs.report("fileio","expanding variable '%s'",str) + end t = t or { } str = gsub(str,",}",",@}") str = gsub(str,"{,","{@,") -- str = "@" .. str .. "@" local ok, done - local function do_first(a,b) - local t = { } - for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end - return "{" .. concat(t,",") .. "}" - end - local function do_second(a,b) - local t = { } - for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end - return "{" .. concat(t,",") .. "}" - end - local function do_both(a,b) - local t = { } - for sa in gmatch(a,"[^,]+") do - for sb in gmatch(b,"[^,]+") do - t[#t+1] = sa .. sb - end - end - return "{" .. concat(t,",") .. "}" - end - local function do_three(a,b,c) - return a .. b.. c - end while true do done = false while true do @@ -6818,6 +8530,11 @@ local function splitpathexpr(str, t, validate) t[#t+1] = s end end + if trace_expansions then + for k=1,#t do + logs.report("fileio","% 4i: %s",k,t[k]) + end + end return t end @@ -6857,18 +8574,27 @@ end -- also we now follow the stupid route: if not set then just assume *one* -- cnf file under texmf (i.e. 
distribution) -resolvers.ownpath = resolvers.ownpath or nil -resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex" -resolvers.autoselfdir = true -- false may be handy for debugging +local args = environment and environment.original_arguments or arg -- this needs a cleanup + +resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" +resolvers.ownbin = gsub(resolvers.ownbin,"\\","/") function resolvers.getownpath() - if not resolvers.ownpath then - if resolvers.autoselfdir and os.selfdir then - resolvers.ownpath = os.selfdir - else - local binary = resolvers.ownbin - if os.platform == "windows" then - binary = file.replacesuffix(binary,"exe") + local ownpath = resolvers.ownpath or os.selfdir + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + end + local binary = resolvers.ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and file.dirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) end for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do local b = file.join(p,binary) @@ -6880,30 +8606,39 @@ function resolvers.getownpath() local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() - if trace_verbose and p ~= pp then - logs.report("fileio","following symlink %s to %s",p,pp) + if trace_locating and p ~= pp then + logs.report("fileio","following symlink '%s' to '%s'",p,pp) end - resolvers.ownpath = pp + ownpath = pp lfs.chdir(olddir) else - if trace_verbose then - logs.report("fileio","unable to check path %s",p) + if trace_locating then + logs.report("fileio","unable to check path '%s'",p) end - resolvers.ownpath = p + ownpath = p end break end end end - if not resolvers.ownpath then resolvers.ownpath = '.' end + if not ownpath or ownpath == "" then + ownpath = "." 
+ logs.report("fileio","forcing fallback ownpath .") + elseif trace_locating then + logs.report("fileio","using ownpath '%s'",ownpath) + end end - return resolvers.ownpath + resolvers.ownpath = ownpath + function resolvers.getownpath() + return resolvers.ownpath + end + return ownpath end local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" } local function identify_own() - local ownpath = resolvers.getownpath() or lfs.currentdir() + local ownpath = resolvers.getownpath() or dir.current() local ie = instance.environment if ownpath then if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end @@ -6916,10 +8651,10 @@ local function identify_own() if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end - if trace_verbose then + if trace_locating then for i=1,#own_places do local v = own_places[i] - logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown") + logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown") end end identify_own = function() end @@ -6951,10 +8686,8 @@ end local function load_cnf_file(fname) fname = resolvers.clean_path(fname) local lname = file.replacesuffix(fname,'lua') - local f = io.open(lname) - if f then -- this will go - f:close() - local dname = file.dirname(fname) + if lfs.isfile(lname) then + local dname = file.dirname(fname) -- fname ? if not instance.configuration[dname] then resolvers.load_data(dname,'configuration',lname and file.basename(lname)) instance.order[#instance.order+1] = instance.configuration[dname] @@ -6962,8 +8695,8 @@ local function load_cnf_file(fname) else f = io.open(fname) if f then - if trace_verbose then - logs.report("fileio","loading %s", fname) + if trace_locating then + logs.report("fileio","loading configuration file %s", fname) end local line, data, n, k, v local dname = file.dirname(fname) @@ -6997,14 +8730,16 @@ local function load_cnf_file(fname) end end f:close() - elseif trace_verbose then - logs.report("fileio","skipping %s", fname) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'", fname) end end end local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared) - for _,c in ipairs(instance.order) do + local order = instance.order + for i=1,#order do + local c = order[i] for k,v in next, c do if not instance.variables[k] then if instance.environment[k] then @@ -7020,19 +8755,24 @@ end function resolvers.load_cnf() local function loadoldconfigdata() - for _, fname in ipairs(instance.cnffiles) do - load_cnf_file(fname) + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + load_cnf_file(cnffiles[i]) end end -- instance.cnffiles contain complete names now ! 
+ -- we still use a funny mix of cnf and new but soon + -- we will switch to lua exclusively as we only use + -- the file to collect the tree roots if #instance.cnffiles == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)") end else - instance.rootpath = instance.cnffiles[1] - for k,fname in ipairs(instance.cnffiles) do - instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local cnffiles = instance.cnffiles + instance.rootpath = cnffiles[1] + for k=1,#cnffiles do + instance.cnffiles[k] = file.collapse_path(cnffiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -7060,8 +8800,9 @@ function resolvers.load_lua() -- yet harmless else instance.rootpath = instance.luafiles[1] - for k,fname in ipairs(instance.luafiles) do - instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local luafiles = instance.luafiles + for k=1,#luafiles do + instance.luafiles[k] = file.collapse_path(luafiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -7093,14 +8834,14 @@ end function resolvers.append_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash append: %s",tag) + logs.report("fileio","hash '%s' appended",tag) end insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } ) end function resolvers.prepend_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash prepend: %s",tag) + logs.report("fileio","hash '%s' prepended",tag) end insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } ) end @@ -7124,9 +8865,11 @@ end -- locators function resolvers.locatelists() - for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do - if trace_verbose then - logs.report("fileio","locating list of %s",path) + local texmfpaths = resolvers.clean_path_list('TEXMF') + for i=1,#texmfpaths do + local path = texmfpaths[i] + if trace_locating then + logs.report("fileio","locating list of '%s'",path) end resolvers.locatedatabase(file.collapse_path(path)) end @@ -7139,11 +8882,11 @@ end function resolvers.locators.tex(specification) if specification and specification ~= '' and lfs.isdir(specification) then if trace_locating then - logs.report("fileio",'! tex locator found: %s',specification) + logs.report("fileio","tex locator '%s' found",specification) end resolvers.append_hash('file',specification,filename) elseif trace_locating then - logs.report("fileio",'? 
tex locator not found: %s',specification) + logs.report("fileio","tex locator '%s' not found",specification) end end @@ -7157,7 +8900,9 @@ function resolvers.loadfiles() instance.loaderror = false instance.files = { } if not instance.renewcache then - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for k=1,#hashes do + local hash = hashes[k] resolvers.hashdatabase(hash.tag,hash.name) if instance.loaderror then break end end @@ -7171,8 +8916,9 @@ end -- generators: function resolvers.loadlists() - for _, hash in ipairs(instance.hashes) do - resolvers.generatedatabase(hash.tag) + local hashes = instance.hashes + for i=1,#hashes do + resolvers.generatedatabase(hashes[i].tag) end end @@ -7184,10 +8930,27 @@ end local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = lpeg.P(" ") +--~ local l_character = lpeg.patterns.utf8 +--~ local l_dangerous = lpeg.P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1) +--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false) + +--~ local function test(str) +--~ print(str,lpeg.match(l_normal,str)) +--~ end +--~ test("ヒラギノ明朝 Pro W3") +--~ test("..ヒラギノ明朝 Pro W3") +--~ test(":ヒラギノ明朝 Pro W3;") +--~ test("ヒラギノ明朝 /Pro W3;") +--~ test("ヒラギノ明朝 Pro W3") + function resolvers.generators.tex(specification) local tag = specification - if trace_verbose then - logs.report("fileio","scanning path %s",specification) + if trace_locating then + logs.report("fileio","scanning path '%s'",specification) end instance.files[tag] = { } local files = instance.files[tag] @@ -7203,7 +8966,8 @@ function resolvers.generators.tex(specification) full = spec end for name in directory(full) do - if not weird:match(name) then + if not lpegmatch(weird,name) then + -- if lpegmatch(l_normal,name) then local mode = attributes(full..name,'mode') if mode == 'file' then if path then @@ -7236,7 +9000,7 @@ function resolvers.generators.tex(specification) end end action() - if trace_verbose then + if trace_locating then logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r) end end @@ -7251,11 +9015,48 @@ end -- we join them and split them after the expansion has taken place. This -- is more convenient. 
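-- Illustrative sketch (not part of the commit): what the split_kpse_path
-- added just below does, written with gmatch instead of the cached lpeg
-- splitter purely for illustration. A path specification is cut on ':' or
-- ';' (on windows only ';', so drive letters like c: survive) and '{unset}'
-- entries are discarded.
local function split_kpse_path_sketch(str)
    str = str:gsub("\\","/")
    local found = { }
    for s in str:gmatch("[^:;]+") do
        if not s:find("^{*unset}*") then
            found[#found+1] = s
        end
    end
    return found
end

-- yields { ".", "$TEXMF/tex//", "~/texmf/tex//" }
local list = split_kpse_path_sketch(".;$TEXMF/tex//;~/texmf/tex//;{unset}")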
+--~ local checkedsplit = string.checkedsplit + +local cache = { } + +local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;"))) + +local function split_kpse_path(str) -- beware, this can be either a path or a {specification} + local found = cache[str] + if not found then + if str == "" then + found = { } + else + str = gsub(str,"\\","/") +--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator) +local split = lpegmatch(splitter,str) + found = { } + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + found[#found+1] = s + end + end + if trace_expansions then + logs.report("fileio","splitting path specification '%s'",str) + for k=1,#found do + logs.report("fileio","% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found +end + +resolvers.split_kpse_path = split_kpse_path + function resolvers.splitconfig() - for i,c in ipairs(instance) do - for k,v in pairs(c) do + for i=1,#instance do + local c = instance[i] + for k,v in next, c do if type(v) == 'string' then - local t = file.split_path(v) + local t = split_kpse_path(v) if #t > 1 then c[k] = t end @@ -7265,21 +9066,25 @@ function resolvers.splitconfig() end function resolvers.joinconfig() - for i,c in ipairs(instance.order) do - for k,v in pairs(c) do -- ipairs? + local order = instance.order + for i=1,#order do + local c = order[i] + for k,v in next, c do -- indexed? if type(v) == 'table' then c[k] = file.join_path(v) end end end end + function resolvers.split_path(str) if type(str) == 'table' then return str else - return file.split_path(str) + return split_kpse_path(str) end end + function resolvers.join_path(str) if type(str) == 'table' then return file.join_path(str) @@ -7291,8 +9096,9 @@ end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h = { }, { } - for _,vv in ipairs(file.split_path(v)) do + local t, h, p = { }, { }, split_kpse_path(v) + for kk=1,#p do + local vv = p[kk] if vv ~= "" and not h[vv] then t[#t+1] = vv h[vv] = true @@ -7339,11 +9145,15 @@ function resolvers.serialize(files) end t[#t+1] = "return {" if instance.sortdata then - for _, k in pairs(sortedkeys(files)) do -- ipairs + local sortedfiles = sortedkeys(files) + for i=1,#sortedfiles do + local k = sortedfiles[i] local fk = files[k] if type(fk) == 'table' then t[#t+1] = "\t['" .. k .. "']={" - for _, kk in pairs(sortedkeys(fk)) do -- ipairs + local sortedfk = sortedkeys(fk) + for j=1,#sortedfk do + local kk = sortedfk[j] t[#t+1] = dump(kk,fk[kk],"\t\t") end t[#t+1] = "\t}," @@ -7368,12 +9178,18 @@ function resolvers.serialize(files) return concat(t,"\n") end +local data_state = { } + +function resolvers.data_state() + return data_state or { } +end + function resolvers.save_data(dataname, makename) -- untested without cache overload for cachename, files in next, instance[dataname] do local name = (makename or file.join)(cachename,dataname) local luaname, lucname = name .. ".lua", name .. 
".luc" - if trace_verbose then - logs.report("fileio","preparing %s for %s",dataname,cachename) + if trace_locating then + logs.report("fileio","preparing '%s' for '%s'",dataname,cachename) end for k, v in next, files do if type(v) == "table" and #v == 1 then @@ -7387,24 +9203,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = files, + uuid = os.uuid(), } local ok = io.savedata(luaname,resolvers.serialize(data)) if ok then - if trace_verbose then - logs.report("fileio","%s saved in %s",dataname,luaname) + if trace_locating then + logs.report("fileio","'%s' saved in '%s'",dataname,luaname) end if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip - if trace_verbose then - logs.report("fileio","%s compiled to %s",dataname,lucname) + if trace_locating then + logs.report("fileio","'%s' compiled to '%s'",dataname,lucname) end else - if trace_verbose then - logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname) + if trace_locating then + logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end - elseif trace_verbose then - logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname) + elseif trace_locating then + logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname) end end end @@ -7416,19 +9233,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi if blob then local data = blob() if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then - if trace_verbose then - logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename) + data_state[#data_state+1] = data.uuid + if trace_locating then + logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = data.content else - if trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + if trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + elseif trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end end @@ -7447,15 +9265,17 @@ function resolvers.resetconfig() end function resolvers.loadnewconfig() - for _, cnf in ipairs(instance.luafiles) do + local luafiles = instance.luafiles + for i=1,#luafiles do + local cnf = luafiles[i] local pathname = file.dirname(cnf) local filename = file.join(pathname,resolvers.luaname) local blob = loadfile(filename) if blob then local data = blob() if data then - if trace_verbose then - logs.report("fileio","loading configuration file %s",filename) + if trace_locating then + logs.report("fileio","loading configuration file '%s'",filename) end if true then -- flatten to variable.progname @@ -7476,14 +9296,14 @@ function resolvers.loadnewconfig() instance['setup'][pathname] = data end else - if trace_verbose then - logs.report("fileio","skipping configuration file %s",filename) + if trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance['setup'][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping 
configuration file %s",filename) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance.order[#instance.order+1] = instance.setup[pathname] if instance.loaderror then break end @@ -7492,7 +9312,9 @@ end function resolvers.loadoldconfig() if not instance.renewcache then - for _, cnf in ipairs(instance.cnffiles) do + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + local cnf = cnffiles[i] local dname = file.dirname(cnf) resolvers.load_data(dname,'configuration') instance.order[#instance.order+1] = instance.configuration[dname] @@ -7682,7 +9504,7 @@ end function resolvers.expanded_path_list(str) if not str then - return ep or { } + return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") @@ -7700,9 +9522,9 @@ end function resolvers.expanded_path_list_from_var(str) -- brrr local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(str) - else return resolvers.expanded_path_list(tmp) + else + return resolvers.expanded_path_list(str) end end @@ -7749,9 +9571,9 @@ function resolvers.isreadable.file(name) local readable = lfs.isfile(name) -- brrr if trace_detail then if readable then - logs.report("fileio","+ readable: %s",name) + logs.report("fileio","file '%s' is readable",name) else - logs.report("fileio","- readable: %s", name) + logs.report("fileio","file '%s' is not readable", name) end end return readable @@ -7767,7 +9589,7 @@ local function collect_files(names) for k=1,#names do local fname = names[k] if trace_detail then - logs.report("fileio","? blobpath asked: %s",fname) + logs.report("fileio","checking name '%s'",fname) end local bname = file.basename(fname) local dname = file.dirname(fname) @@ -7783,7 +9605,7 @@ local function collect_files(names) local files = blobpath and instance.files[blobpath] if files then if trace_detail then - logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname) + logs.report("fileio","deep checking '%s' (%s)",blobpath,bname) end local blobfile = files[bname] if not blobfile then @@ -7817,7 +9639,7 @@ local function collect_files(names) end end elseif trace_locating then - logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname) + logs.report("fileio","no match in '%s' (%s)",blobpath,bname) end end end @@ -7867,14 +9689,13 @@ end local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc) local result = collected or { } local stamp = nil - filename = file.collapse_path(filename) -- elsewhere - filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere + filename = file.collapse_path(filename) -- speed up / beware: format problem if instance.remember then stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format if instance.found[stamp] then if trace_locating then - logs.report("fileio",'! 
remembered: %s',filename) + logs.report("fileio","remembering file '%s'",filename) end return instance.found[stamp] end @@ -7882,7 +9703,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not dangerous[instance.format or "?"] then if resolvers.isreadable.file(filename) then if trace_detail then - logs.report("fileio",'= found directly: %s',filename) + logs.report("fileio","file '%s' found directly",filename) end instance.found[stamp] = { filename } return { filename } @@ -7890,13 +9711,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end if find(filename,'%*') then if trace_locating then - logs.report("fileio",'! wildcard: %s', filename) + logs.report("fileio","checking wildcard '%s'", filename) end result = resolvers.find_wildcard_files(filename) elseif file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then - logs.report("fileio",'! qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end result = { filename } else @@ -7906,7 +9727,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. ".tex" if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing standard filetype: tex') + logs.report("fileio","no suffix, forcing standard filetype 'tex'") end result, ok = { forcedname }, true end @@ -7916,7 +9737,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. "." .. s if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing format filetype: %s', s) + logs.report("fileio","no suffix, forcing format filetype '%s'", s) end result, ok = { forcedname }, true break @@ -7928,7 +9749,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- try to find in tree (no suffix manipulation), here we search for the -- matching last part of the name local basename = file.basename(filename) - local pattern = (filename .. "$"):gsub("([%.%-])","%%%1") + local pattern = gsub(filename .. 
"$","([%.%-])","%%%1") local savedformat = instance.format local format = savedformat or "" if format == "" then @@ -7938,19 +9759,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan instance.format = "othertextfiles" -- kind of everything, maybe texinput is better end -- - local resolved = collect_instance_files(basename) - if #result == 0 then - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered) + if basename ~= filename then + local resolved = collect_instance_files(basename) + if #result == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered) + end end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if rr:find(pattern) then - result[#result+1], ok = rr, true + resolvers.format = savedformat + -- + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1], ok = rr, true + end end end -- a real wildcard: @@ -7959,14 +9782,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- local filelist = collect_files({basename}) -- for f=1,#filelist do -- local ff = filelist[f][3] or "" - -- if ff:find(pattern) then + -- if find(ff,pattern) then -- result[#result+1], ok = ff, true -- end -- end -- end end if not ok and trace_locating then - logs.report("fileio",'? qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end end else @@ -7985,12 +9808,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan wantedfiles[#wantedfiles+1] = forcedname filetype = resolvers.format_of_suffix(forcedname) if trace_locating then - logs.report("fileio",'! forcing filetype: %s',filetype) + logs.report("fileio","forcing filetype '%s'",filetype) end else filetype = resolvers.format_of_suffix(filename) if trace_locating then - logs.report("fileio",'! using suffix based filetype: %s',filetype) + logs.report("fileio","using suffix based filetype '%s'",filetype) end end else @@ -8002,7 +9825,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end filetype = instance.format if trace_locating then - logs.report("fileio",'! using given filetype: %s',filetype) + logs.report("fileio","using given filetype '%s'",filetype) end end local typespec = resolvers.variable_of_format(filetype) @@ -8010,9 +9833,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then - logs.report("fileio",'? filename: %s',filename) - logs.report("fileio",'? filetype: %s',filetype or '?') - logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | ")) + logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end for k=1,#wantedfiles do local fname = wantedfiles[k] @@ -8033,36 +9854,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan else -- list search local filelist = collect_files(wantedfiles) - local doscan, recurse + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = file.dirname(filelist[i][2]) .. "/" + end + end if trace_detail then - logs.report("fileio",'? filename: %s',filename) + logs.report("fileio","checking filename '%s'",filename) end -- a bit messy ... 
esp the doscan setting here + local doscan for k=1,#pathlist do local path = pathlist[k] if find(path,"^!!") then doscan = false else doscan = true end - if find(path,"//$") then recurse = true else recurse = false end local pathname = gsub(path,"^!+", '') done = false -- using file list - if filelist and not (done and not instance.allresults) and recurse then - -- compare list entries with permitted pattern - pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences - pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname - pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless - local expr = "^" .. pathname + if filelist then + local expression + -- compare list entries with permitted pattern -- /xx /xx// + if not find(pathname,"/$") then + expression = pathname .. "/" + else + expression = pathname + end + expression = gsub(expression,"([%-%.])","%%%1") -- this also influences + expression = gsub(expression,"//+$", '/.*') -- later usage of pathname + expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless + expression = "^" .. expression .. "$" + if trace_detail then + logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname) + end for k=1,#filelist do local fl = filelist[k] local f = fl[2] - if find(f,expr) then - if trace_detail then - logs.report("fileio",'= found in hash: %s',f) - end + local d = dirlist[k] + if find(d,expression) then --- todo, test for readable result[#result+1] = fl[3] resolvers.register_in_trees(f) -- for tracing used files done = true - if not instance.allresults then break end + if instance.allresults then + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d) + end + else + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d) + end + break + end + elseif trace_detail then + logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d) end end end @@ -8078,7 +9922,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local fname = file.join(ppname,w) if resolvers.isreadable.file(fname) then if trace_detail then - logs.report("fileio",'= found by scanning: %s',fname) + logs.report("fileio","found '%s' by scanning",fname) end result[#result+1] = fname done = true @@ -8141,7 +9985,7 @@ function resolvers.find_given_files(filename) local hashes = instance.hashes for k=1,#hashes do local hash = hashes[k] - local files = instance.files[hash.tag] + local files = instance.files[hash.tag] or { } local blist = files[bname] if not blist then local rname = "remap:"..bname @@ -8251,9 +10095,9 @@ function resolvers.load(option) statistics.starttiming(instance) resolvers.resetconfig() resolvers.identify_cnf() - resolvers.load_lua() + resolvers.load_lua() -- will become the new method resolvers.expand_variables() - resolvers.load_cnf() + resolvers.load_cnf() -- will be skipped when we have a lua file resolvers.expand_variables() if option ~= "nofiles" then resolvers.load_hash() @@ -8265,22 +10109,23 @@ end function resolvers.for_files(command, files, filetype, mustexist) if files and #files > 0 then local function report(str) - if trace_verbose then + if trace_locating then logs.report("fileio",str) -- has already verbose else print(str) end end - if trace_verbose then - report('') + if trace_locating then + report('') -- ? 
end - for _, file in ipairs(files) do + for f=1,#files do + local file = files[f] local result = command(file,filetype,mustexist) if type(result) == 'string' then report(result) else - for _,v in ipairs(result) do - report(v) + for i=1,#result do + report(result[i]) -- could be unpack end end end @@ -8327,18 +10172,19 @@ end function table.sequenced(t,sep) -- temp here local s = { } - for k, v in pairs(t) do -- pairs? - s[#s+1] = k .. "=" .. v + for k, v in next, t do -- indexed? + s[#s+1] = k .. "=" .. tostring(v) end return concat(s, sep or " | ") end function resolvers.methodhandler(what, filename, filetype) -- ... + filename = file.collapse_path(filename) local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb local scheme = specification.scheme if resolvers[what][scheme] then if trace_locating then - logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification)) + logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolvers[what][scheme](filename,filetype) -- todo: specification else @@ -8358,8 +10204,9 @@ function resolvers.clean_path(str) end function resolvers.do_with_path(name,func) - for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs? - func("^"..resolvers.clean_path(v)) + local pathlist = resolvers.expanded_path_list(name) + for i=1,#pathlist do + func("^"..resolvers.clean_path(pathlist[i])) end end @@ -8368,7 +10215,9 @@ function resolvers.do_with_var(name,func) end function resolvers.with_files(pattern,handle) - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for i=1,#hashes do + local hash = hashes[i] local blobpath = hash.tag local blobtype = hash.type if blobpath then @@ -8383,7 +10232,7 @@ function resolvers.with_files(pattern,handle) if type(v) == "string" then handle(blobtype,blobpath,v,k) else - for _,vv in pairs(v) do -- ipairs? + for _,vv in next, v do -- indexed handle(blobtype,blobpath,vv,k) end end @@ -8395,7 +10244,7 @@ function resolvers.with_files(pattern,handle) end function resolvers.locate_format(name) - local barename, fmtname = name:gsub("%.%a+$",""), "" + local barename, fmtname = gsub(name,"%.%a+$",""), "" if resolvers.usecache then local path = file.join(caches.setpath("formats")) -- maybe platform fmtname = file.join(path,barename..".fmt") or "" @@ -8443,7 +10292,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8467,7 +10316,7 @@ luatools with a recache feature.
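-- Illustrative sketch (not part of the commit): the vararg test that the
-- caches.setpath change just below uses instead of table.is_empty. Packing
-- the arguments once and checking the count is enough to tell "no
-- subdirectories given" from "subdirectories given"; dir.mkdirs is replaced
-- by a plain concat here to keep the fragment self-contained.
local function setpath_sketch(base, ...)
    local dirs = { ... }
    if #dirs > 0 then
        return base .. "/" .. table.concat(dirs, "/")
    end
    return base
end

print(setpath_sketch("texmf-cache"))                    --> texmf-cache
print(setpath_sketch("texmf-cache", "luatex", "fonts")) --> texmf-cache/luatex/fonts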

local format, lower, gsub = string.format, string.lower, string.gsub -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet caches = caches or { } @@ -8554,7 +10403,8 @@ function caches.setpath(...) caches.path = '.' end caches.path = resolvers.clean_path(caches.path) - if not table.is_empty({...}) then + local dirs = { ... } + if #dirs > 0 then local pth = dir.mkdirs(caches.path,...) return pth end @@ -8600,6 +10450,7 @@ function caches.savedata(filepath,filename,data,raw) if raw then reduce, simplify = false, false end + data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex else @@ -8625,7 +10476,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-res'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8660,6 +10511,14 @@ prefixes.relative = function(str,n) return resolvers.clean_path(str) end +prefixes.auto = function(str) + local fullname = prefixes.relative(str) + if not lfs.isfile(fullname) then + fullname = prefixes.locate(str) + end + return fullname +end + prefixes.locate = function(str) local fullname = resolvers.find_given_file(str) or "" return resolvers.clean_path((fullname ~= "" and fullname) or str) @@ -8683,6 +10542,16 @@ prefixes.full = prefixes.locate prefixes.file = prefixes.filename prefixes.path = prefixes.pathname +function resolvers.allprefixes(separator) + local all = table.sortedkeys(prefixes) + if separator then + for i=1,#all do + all[i] = all[i] .. 
":" + end + end + return all +end + local function _resolve_(method,target) if prefixes[method] then return prefixes[method](target) @@ -8693,7 +10562,8 @@ end local function resolve(str) if type(str) == "table" then - for k, v in pairs(str) do -- ipairs + for k=1,#str do + local v = str[k] str[k] = resolve(v) or v end elseif str and str ~= "" then @@ -8706,7 +10576,7 @@ resolvers.resolve = resolve if os.uname then - for k, v in pairs(os.uname()) do + for k, v in next, os.uname() do if not prefixes[k] then prefixes[k] = function() return v end end @@ -8721,7 +10591,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8742,7 +10612,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-out'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8758,7 +10628,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8769,8 +10639,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) --[[ldx--

Once we found ourselves defining similar cache constructs @@ -8834,7 +10702,7 @@ end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] - return storage and not table.is_empty(storage) and storage.cache_version == container.version + return storage and storage.cache_version == container.version else return false end @@ -8886,16 +10754,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, lower, gsub = string.format, string.lower, string.gsub +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -- since we want to use the cache instead of the tree, we will now -- reimplement the saver. @@ -8939,19 +10806,20 @@ resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) - if table.is_empty(mountpaths) and usecache then + if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = { caches.setpath("mount") } end - if not table.is_empty(mountpaths) then + if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) - for k, root in pairs(mountpaths) do + for k=1,#mountpaths do + local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then - if line:find("^[%%#%-]") then -- or %W + if find(line,"^[%%#%-]") then -- or %W -- skip - elseif line:find("^zip://") then + elseif find(line,"^zip://") then if trace_locating then logs.report("fileio","mounting %s",line) end @@ -8996,11 +10864,13 @@ function statistics.check_fmt_status(texname) local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") - if luv.enginebanner and luv.enginebanner ~= enginebanner then - return "engine mismatch" + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner) end - if luv.sourcehash and luv.sourcehash ~= sourcehash then - return "source mismatch" + local luvhash = luv.sourcehash or "?" 
+ if luvhash ~= sourcehash then + return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash) end else return "invalid status file" @@ -9019,18 +10889,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-zip'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, find = string.format, string.find +local format, find, match = string.format, string.find, string.match +local unpack = unpack or table.unpack -local trace_locating, trace_verbose = false, false +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trace_verbose = v end) +-- zip:///oeps.zip?name=bla/bla.tex +-- zip:///oeps.zip?tree=tex/texmf-local +-- zip:///texmf.zip?tree=/tex/texmf +-- zip:///texmf.zip?tree=/tex/texmf-local +-- zip:///texmf-mine.zip?tree=/tex/texmf-projects zip = zip or { } zip.archives = zip.archives or { } @@ -9041,9 +10915,6 @@ local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, local archives = zip.archives --- zip:///oeps.zip?name=bla/bla.tex --- zip:///oeps.zip?tree=tex/texmf-local - local function validzip(str) -- todo: use url splitter if not find(str,"^zip://") then return "zip:///" .. str @@ -9073,26 +10944,22 @@ function zip.closearchive(name) end end --- zip:///texmf.zip?tree=/tex/texmf --- zip:///texmf.zip?tree=/tex/texmf-local --- zip:///texmf-mine.zip?tree=/tex/texmf-projects - function locators.zip(specification) -- where is this used? startup zips (untested) specification = resolvers.splitmethod(specification) local zipfile = specification.path local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree if trace_locating then if zfile then - logs.report("fileio",'! zip locator, found: %s',specification.original) + logs.report("fileio","zip locator, archive '%s' found",specification.original) else - logs.report("fileio",'? zip locator, not found: %s',specification.original) + logs.report("fileio","zip locator, archive '%s' not found",specification.original) end end end function hashers.zip(tag,name) - if trace_verbose then - logs.report("fileio","loading zip file %s as %s",name,tag) + if trace_locating then + logs.report("fileio","loading zip file '%s' as '%s'",name,tag) end resolvers.usezipfile(format("%s?tree=%s",tag,name)) end @@ -9117,23 +10984,25 @@ function finders.zip(specification,filetype) local zfile = zip.openarchive(specification.path) if zfile then if trace_locating then - logs.report("fileio",'! zip finder, path: %s',specification.path) + logs.report("fileio","zip finder, archive '%s' found",specification.path) end local dfile = zfile:open(q.name) if dfile then dfile = zfile:close() if trace_locating then - logs.report("fileio",'+ zip finder, name: %s',q.name) + logs.report("fileio","zip finder, file '%s' found",q.name) end return specification.original + elseif trace_locating then + logs.report("fileio","zip finder, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'? 
zip finder, path %s',specification.path) + logs.report("fileio","zip finder, unknown archive '%s'",specification.path) end end end if trace_locating then - logs.report("fileio",'- zip finder, name: %s',filename) + logs.report("fileio","zip finder, '%s' not found",filename) end return unpack(finders.notfound) end @@ -9146,20 +11015,25 @@ function openers.zip(specification) local zfile = zip.openarchive(zipspecification.path) if zfile then if trace_locating then - logs.report("fileio",'+ zip starter, path: %s',zipspecification.path) + logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path) end local dfile = zfile:open(q.name) if dfile then logs.show_open(specification) + if trace_locating then + logs.report("fileio","zip opener, file '%s' found",q.name) + end return openers.text_opener(specification,dfile,'zip') + elseif trace_locating then + logs.report("fileio","zip opener, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'- zip starter, path %s',zipspecification.path) + logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path) end end end if trace_locating then - logs.report("fileio",'- zip opener, name: %s',filename) + logs.report("fileio","zip opener, '%s' not found",filename) end return unpack(openers.notfound) end @@ -9172,25 +11046,27 @@ function loaders.zip(specification) local zfile = zip.openarchive(specification.path) if zfile then if trace_locating then - logs.report("fileio",'+ zip starter, path: %s',specification.path) + logs.report("fileio","zip loader, archive '%s' opened",specification.path) end local dfile = zfile:open(q.name) if dfile then logs.show_load(filename) if trace_locating then - logs.report("fileio",'+ zip loader, name: %s',filename) + logs.report("fileio","zip loader, file '%s' loaded",filename) end local s = dfile:read("*all") dfile:close() return true, s, #s + elseif trace_locating then + logs.report("fileio","zip loader, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'- zip starter, path: %s',specification.path) + logs.report("fileio","zip loader, unknown archive '%s'",specification.path) end end end if trace_locating then - logs.report("fileio",'- zip loader, name: %s',filename) + logs.report("fileio","zip loader, '%s' not found",filename) end return unpack(openers.notfound) end @@ -9200,21 +11076,15 @@ end function resolvers.usezipfile(zipname) zipname = validzip(zipname) - if trace_locating then - logs.report("fileio",'! zip use, file: %s',zipname) - end local specification = resolvers.splitmethod(zipname) local zipfile = specification.path if zipfile and not zip.registeredfiles[zipname] then local tree = url.query(specification.query).tree or "" - if trace_locating then - logs.report("fileio",'! zip register, file: %s',zipname) - end local z = zip.openarchive(zipfile) if z then local instance = resolvers.instance if trace_locating then - logs.report("fileio","= zipfile, registering: %s",zipname) + logs.report("fileio","zip registering, registering archive '%s'",zipname) end statistics.starttiming(instance) resolvers.prepend_hash('zip',zipname,zipfile) @@ -9223,10 +11093,10 @@ function resolvers.usezipfile(zipname) instance.files[zipname] = resolvers.register_zip_file(z,tree or "") statistics.stoptiming(instance) elseif trace_locating then - logs.report("fileio","? zipfile, unknown: %s",zipname) + logs.report("fileio","zip registering, unknown archive '%s'",zipname) end elseif trace_locating then - logs.report("fileio",'! 
zip register, no file: %s',zipname) + logs.report("fileio","zip registering, '%s' not found",zipname) end end @@ -9238,11 +11108,11 @@ function resolvers.register_zip_file(z,tree) filter = format("^%s/(.+)/(.-)$",tree) end if trace_locating then - logs.report("fileio",'= zip filter: %s',filter) + logs.report("fileio","zip registering, using filter '%s'",filter) end local register, n = resolvers.register_file, 0 for i in z:files() do - local path, name = i.filename:match(filter) + local path, name = match(i.filename,filter) if path then if name and name ~= '' then register(files, name, path) @@ -9255,7 +11125,7 @@ function resolvers.register_zip_file(z,tree) n = n + 1 end end - logs.report("fileio",'= zip entries: %s',n) + logs.report("fileio","zip registering, %s files registered",n) return files end @@ -9266,12 +11136,14 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-crl'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local gsub = string.gsub + curl = curl or { } curl.cached = { } @@ -9280,9 +11152,9 @@ curl.cachepath = caches.definepath("curl") local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders function curl.fetch(protocol, name) - local cachename = curl.cachepath() .. "/" .. name:gsub("[^%a%d%.]+","-") --- cachename = cachename:gsub("[\\/]", io.fileseparator) - cachename = cachename:gsub("[\\]", "/") -- cleanup + local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-") +-- cachename = gsub(cachename,"[\\/]", io.fileseparator) + cachename = gsub(cachename,"[\\]", "/") -- cleanup if not curl.cached[name] then if not io.exists(cachename) then curl.cached[name] = cachename @@ -9324,6 +11196,164 @@ curl.install('https') curl.install('ftp') +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['data-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- some loading stuff ... we might move this one to slot 2 depending +-- on the developments (the loaders must not trigger kpse); we could +-- of course use a more extensive lib path spec + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local gsub, insert = string.gsub, table.insert +local unpack = unpack or table.unpack + +local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs' +local clibformats = { 'lib' } + +local _path_, libpaths, _cpath_, clibpaths + +function package.libpaths() + if not _path_ or package.path ~= _path_ then + _path_ = package.path + libpaths = file.split_path(_path_,";") + end + return libpaths +end + +function package.clibpaths() + if not _cpath_ or package.cpath ~= _cpath_ then + _cpath_ = package.cpath + clibpaths = file.split_path(_cpath_,";") + end + return clibpaths +end + +local function thepath(...) + local t = { ... } t[#t+1] = "?.lua" + local path = file.join(unpack(t)) + if trace_locating then + logs.report("fileio","! 
appending '%s' to 'package.path'",path) + end + return path +end + +local p_libpaths, a_libpaths = { }, { } + +function package.append_libpath(...) + insert(a_libpath,thepath(...)) +end + +function package.prepend_libpath(...) + insert(p_libpaths,1,thepath(...)) +end + +-- beware, we need to return a loadfile result ! + +local function loaded(libpaths,name,simple) + for i=1,#libpaths do -- package.path, might become option + local libpath = libpaths[i] + local resolved = gsub(libpath,"%?",simple) + if trace_locating then -- more detail + logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved) + end + return loadfile(resolved) + end + end +end + + +package.loaders[2] = function(name) -- was [#package.loaders+1] + if trace_locating then -- mode detail + logs.report("fileio","! locating '%s'",name) + end + for i=1,#libformats do + local format = libformats[i] + local resolved = resolvers.find_file(name,format) or "" + if trace_locating then -- mode detail + logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format) + end + if resolved ~= "" then + if trace_locating then + logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved) + end + return loadfile(resolved) + end + end + -- libpaths + local libpaths, clibpaths = package.libpaths(), package.clibpaths() + local simple = gsub(name,"%.lua$","") + local simple = gsub(simple,"%.","/") + local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple) + if resolved then + return resolved + end + -- + local libname = file.addsuffix(simple,os.libsuffix) + for i=1,#clibformats do + -- better have a dedicated loop + local format = clibformats[i] + local paths = resolvers.expanded_path_list_from_var(format) + for p=1,#paths do + local path = paths[p] + local resolved = file.join(path,libname) + if trace_locating then -- mode detail + logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved) + end + return package.loadlib(resolved,name) + end + end + end + for i=1,#clibpaths do -- package.path, might become option + local libpath = clibpaths[i] + local resolved = gsub(libpath,"?",simple) + if trace_locating then -- more detail + logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved) + end + return package.loadlib(resolved,name) + end + end + -- just in case the distribution is messed up + if trace_loading then -- more detail + logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name) + end + local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or "" + if resolved ~= "" then + if trace_locating then + logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved) + end + return loadfile(resolved) + end + if trace_locating then + logs.report("fileio",'? unable to locate lib: %s',name) + end +-- return "unable to locate " .. 
name +end + +resolvers.loadlualib = require + + end -- of closure do -- create closure to overcome 200 locals limit @@ -9437,7 +11467,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -9445,47 +11475,47 @@ if not modules then modules = { } end modules ['data-aux'] = { local find = string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") local oldscript = resolvers.clean_path(oldname) - if trace_verbose then + if trace_locating then logs.report("fileio","to be replaced old script %s", oldscript) end local newscripts = resolvers.find_files(newname) or { } if #newscripts == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","unable to locate new script") end else for i=1,#newscripts do local newscript = resolvers.clean_path(newscripts[i]) - if trace_verbose then + if trace_locating then logs.report("fileio","checking new script %s", newscript) end if oldscript == newscript then - if trace_verbose then + if trace_locating then logs.report("fileio","old and new script are the same") end elseif not find(newscript,scriptpath) then - if trace_verbose then + if trace_locating then logs.report("fileio","new script should come from %s",scriptpath) end elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_verbose then + if trace_locating then logs.report("fileio","invalid new script name") end else local newdata = io.loaddata(newscript) if newdata then - if trace_verbose then + if trace_locating then logs.report("fileio","old script content replaced by new content") end io.savedata(oldscript,newdata) break - elseif trace_verbose then + elseif trace_locating then logs.report("fileio","unable to load new script") end end @@ -9500,25 +11530,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local find, gsub, match = string.find, string.gsub, string.match +local getenv, setenv = os.getenv, os.setenv + -- loads *.tmf files in minimal tree roots (to be optimized and documented) function resolvers.check_environment(tree) logs.simpleline() - os.setenv('TMP', os.getenv('TMP') or os.getenv('TEMP') or os.getenv('TMPDIR') or os.getenv('HOME')) - os.setenv('TEXOS', os.getenv('TEXOS') or ("texmf-" .. os.currentplatform())) - os.setenv('TEXPATH', (tree or "tex"):gsub("\/+$",'')) - os.setenv('TEXMFOS', os.getenv('TEXPATH') .. "/" .. os.getenv('TEXOS')) + setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME')) + setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. 
os.platform)) + setenv('TEXPATH', gsub(tree or "tex","\/+$",'')) + setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS')) logs.simpleline() - logs.simple("preset : TEXPATH => %s", os.getenv('TEXPATH')) - logs.simple("preset : TEXOS => %s", os.getenv('TEXOS')) - logs.simple("preset : TEXMFOS => %s", os.getenv('TEXMFOS')) - logs.simple("preset : TMP => %s", os.getenv('TMP')) + logs.simple("preset : TEXPATH => %s", getenv('TEXPATH')) + logs.simple("preset : TEXOS => %s", getenv('TEXOS')) + logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS')) + logs.simple("preset : TMP => %s", getenv('TMP')) logs.simple('') end @@ -9526,27 +11559,27 @@ function resolvers.load_environment(name) -- todo: key=value as well as lua local f = io.open(name) if f then for line in f:lines() do - if line:find("^[%%%#]") then + if find(line,"^[%%%#]") then -- skip comment else - local key, how, value = line:match("^(.-)%s*([<=>%?]+)%s*(.*)%s*$") + local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$") if how then - value = value:gsub("%%(.-)%%", function(v) return os.getenv(v) or "" end) + value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end) if how == "=" or how == "<<" then - os.setenv(key,value) + setenv(key,value) elseif how == "?" or how == "??" then - os.setenv(key,os.getenv(key) or value) + setenv(key,getenv(key) or value) elseif how == "<" or how == "+=" then - if os.getenv(key) then - os.setenv(key,os.getenv(key) .. io.fileseparator .. value) + if getenv(key) then + setenv(key,getenv(key) .. io.fileseparator .. value) else - os.setenv(key,value) + setenv(key,value) end elseif how == ">" or how == "=+" then - if os.getenv(key) then - os.setenv(key,value .. io.pathseparator .. os.getenv(key)) + if getenv(key) then + setenv(key,value .. io.pathseparator .. 
getenv(key)) else - os.setenv(key,value) + setenv(key,value) end end end @@ -9585,6 +11618,9 @@ if not modules then modules = { } end modules ['luat-sta'] = { -- this code is used in the updater +local gmatch, match = string.gmatch, string.match +local type = type + states = states or { } states.data = states.data or { } states.hash = states.hash or { } @@ -9613,13 +11649,17 @@ function states.set_by_tag(tag,key,value,default,persistent) if d then if type(d) == "table" then local dkey, hkey = key, key - local pre, post = key:match("(.+)%.([^%.]+)$") + local pre, post = match(key,"(.+)%.([^%.]+)$") if pre and post then - for k in pre:gmatch("[^%.]+") do + for k in gmatch(pre,"[^%.]+") do local dk = d[k] if not dk then dk = { } d[k] = dk + elseif type(dk) == "string" then + -- invalid table, unable to upgrade structure + -- hope for the best or delete the state file + break end d = dk end @@ -9647,7 +11687,7 @@ function states.get_by_tag(tag,key,default) else local d = states.data[tag] if d then - for k in key:gmatch("[^%.]+") do + for k in gmatch(key,"[^%.]+") do local dk = d[k] if dk then d = dk @@ -9782,6 +11822,7 @@ own.libs = { -- todo: check which ones are really needed 'l-os.lua', 'l-file.lua', 'l-md5.lua', + 'l-url.lua', 'l-dir.lua', 'l-boolean.lua', 'l-math.lua', @@ -9790,11 +11831,13 @@ own.libs = { -- todo: check which ones are really needed 'l-utils.lua', 'l-aux.lua', -- 'l-xml.lua', + 'trac-tra.lua', 'lxml-tab.lua', - 'lxml-pth.lua', - 'lxml-ent.lua', + 'lxml-lpt.lua', +-- 'lxml-ent.lua', 'lxml-mis.lua', - 'trac-tra.lua', + 'lxml-aux.lua', + 'lxml-xml.lua', 'luat-env.lua', 'trac-inf.lua', 'trac-log.lua', @@ -9809,7 +11852,7 @@ own.libs = { -- todo: check which ones are really needed -- 'data-bin.lua', 'data-zip.lua', 'data-crl.lua', --- 'data-lua.lua', + 'data-lua.lua', 'data-kps.lua', -- so that we can replace kpsewhich 'data-aux.lua', -- updater 'data-tmf.lua', -- tree files @@ -9827,7 +11870,8 @@ end -- End of hack. -own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua' +own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua' + own.path = string.match(own.name,"^(.+)[\\/].-$") or "." own.list = { '.' } @@ -9865,18 +11909,25 @@ if not resolvers then os.exit() end -logs.setprogram('MTXrun',"TDS Runner Tool 1.22",environment.arguments["verbose"] or false) +logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false) local instance = resolvers.reset() +local trackspec = environment.argument("trackers") or environment.argument("track") + +if trackspec then + trackers.enable(trackspec) +end + runners = runners or { } -- global messages = messages or { } messages.help = [[ ---script run an mtx script (--noquotes) ---execute run a script or program (--noquotes) +--script run an mtx script (lua prefered method) (--noquotes), no script gives list +--execute run a script or program (texmfstart method) (--noquotes) --resolve resolve prefixed arguments --ctxlua run internally (using preloaded libs) +--internal run script using built in libraries (same as --ctxlua) --locate locate given filename --autotree use texmf tree cf. 
env 'texmfstart_tree' or 'texmfstarttree' @@ -9893,16 +11944,20 @@ messages.help = [[ --unix create unix (linux) stubs --verbose give a bit more info +--trackers=list enable given trackers --engine=str target engine --progname=str format or backend --edit launch editor with found file --launch (--all) launch files like manuals, assumes os support ---intern run script using built in libraries +--timedrun run a script an time its run +--autogenerate regenerate databases if needed (handy when used to run context in an editor) + +--usekpse use kpse as fallback (when no mkiv and cache installed, often slower) +--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality) ---usekpse use kpse as fallback (when no mkiv and cache installed, often slower) ---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality) +--prefixes show supported prefixes ]] runners.applications = { @@ -9918,20 +11973,17 @@ runners.suffixes = { } runners.registered = { - texexec = { 'texexec.rb', true }, -- context mkii runner (only tool not to be luafied) + texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied) texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it) texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied - -- texwork = { \texwork.pl', false }, -- perltk based editing environment, only used at pragma - + -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma makempy = { 'makempy.pl', true }, mptopdf = { 'mptopdf.pl', true }, pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced) - -- examplex = { 'examplex.rb', false }, concheck = { 'concheck.rb', false }, - runtools = { 'runtools.rb', true }, textools = { 'textools.rb', true }, tmftools = { 'tmftools.rb', true }, @@ -9943,7 +11995,6 @@ runners.registered = { xmltools = { 'xmltools.rb', true }, -- luatools = { 'luatools.lua', true }, mtxtools = { 'mtxtools.rb', true }, - pdftrimwhite = { 'pdftrimwhite.pl', false } } @@ -9952,6 +12003,13 @@ runners.launchers = { unix = { } } +-- like runners.libpath("framework"): looks on script's subpath + +function runners.libpath(...) + package.prepend_libpath(file.dirname(environment.ownscript),...) + package.prepend_libpath(file.dirname(environment.ownname) ,...) 
+end + function runners.prepare() local checkname = environment.argument("ifchanged") if checkname and checkname ~= "" then @@ -9996,7 +12054,7 @@ function runners.prepare() return "run" end -function runners.execute_script(fullname,internal) +function runners.execute_script(fullname,internal,nosplit) local noquote = environment.argument("noquotes") if fullname and fullname ~= "" then local state = runners.prepare() @@ -10036,17 +12094,20 @@ function runners.execute_script(fullname,internal) end end if result and result ~= "" then - local before, after = environment.split_arguments(fullname) -- already done - environment.arguments_before, environment.arguments_after = before, after + if not no_split then + local before, after = environment.split_arguments(fullname) -- already done + environment.arguments_before, environment.arguments_after = before, after + end if internal then - arg = { } for _,v in pairs(after) do arg[#arg+1] = v end + arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end + environment.ownscript = result dofile(result) else local binary = runners.applications[file.extname(result)] if binary and binary ~= "" then result = binary .. " " .. result end - local command = result .. " " .. environment.reconstruct_commandline(after,noquote) + local command = result .. " " .. environment.reconstruct_commandline(environment.arguments_after,noquote) if logs.verbose then logs.simpleline() logs.simple("executing: %s",command) @@ -10054,8 +12115,24 @@ function runners.execute_script(fullname,internal) logs.simpleline() io.flush() end - local code = os.exec(command) -- maybe spawn - return code == 0 + -- no os.exec because otherwise we get the wrong return value + local code = os.execute(command) -- maybe spawn + if code == 0 then + return true + else + if binary then + binary = file.addsuffix(binary,os.binsuffix) + for p in string.gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do + if lfs.isfile(file.join(p,binary)) then + return false + end + end + logs.simpleline() + logs.simple("This script needs '%s' which seems not to be installed.",binary) + logs.simpleline() + end + return false + end end end end @@ -10088,7 +12165,7 @@ function runners.execute_program(fullname) return false end --- the --usekpse flag will fallback on kpse +-- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs) local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010' local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010' @@ -10143,7 +12220,7 @@ function runners.locate_file(filename) end function runners.locate_platform() - runners.report_location(os.currentplatform()) + runners.report_location(os.platform) end function runners.report_location(result) @@ -10176,7 +12253,8 @@ end function runners.save_script_session(filename, list) local t = { } - for _, key in ipairs(list) do + for i=1,#list do + local key = list[i] t[key] = environment.arguments[key] end io.savedata(filename,table.serialize(t,true)) @@ -10265,20 +12343,22 @@ function runners.find_mtx_script(filename) if fullname and fullname ~= "" then return fullname end + -- mtx- prefix checking + local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-" -- context namespace, mtx- - fullname = "mtx-" .. filename + fullname = mtxprefix .. 
filename fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname end -- context namespace, mtx-s - fullname = "mtx-" .. basename .. "s" .. "." .. suffix + fullname = mtxprefix .. basename .. "s" .. "." .. suffix fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname end -- context namespace, mtx- - fullname = "mtx-" .. basename:gsub("s$","") .. "." .. suffix + fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname @@ -10288,9 +12368,17 @@ function runners.find_mtx_script(filename) return fullname end -function runners.execute_ctx_script(filename,arguments) +function runners.execute_ctx_script(filename) + local arguments = environment.arguments_after local fullname = runners.find_mtx_script(filename) or "" - -- retyr after generate but only if --autogenerate + if file.extname(fullname) == "cld" then + -- handy in editors where we force --autopdf + logs.simple("running cld script: %s",filename) + table.insert(arguments,1,fullname) + table.insert(arguments,"--autopdf") + fullname = runners.find_mtx_script("context") or "" + end + -- retry after generate but only if --autogenerate if fullname == "" and environment.argument("autogenerate") then -- might become the default instance.renewcache = true logs.setverbose(true) @@ -10319,32 +12407,51 @@ function runners.execute_ctx_script(filename,arguments) if logs.verbose then logs.simple("using script: %s\n",fullname) end + environment.ownscript = fullname dofile(fullname) local savename = environment.arguments['save'] - if savename and runners.save_list and not table.is_empty(runners.save_list or { }) then - if type(savename) ~= "string" then savename = file.basename(fullname) end - savename = file.replacesuffix(savename,"cfg") - runners.save_script_session(savename, runners.save_list) + if savename then + local save_list = runners.save_list + if save_list and next(save_list) then + if type(savename) ~= "string" then savename = file.basename(fullname) end + savename = file.replacesuffix(savename,"cfg") + runners.save_script_session(savename,save_list) + end end return true end else - logs.setverbose(true) - if filename == "" then - logs.simple("unknown script, no name given") + -- logs.setverbose(true) + if filename == "" or filename == "help" then local context = resolvers.find_file("mtx-context.lua") + logs.setverbose(true) if context ~= "" then local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed local valid = { } - for _, scriptname in ipairs(result) do - scriptname = string.match(scriptname,".*mtx%-([^%-]-)%.lua") - if scriptname then - valid[#valid+1] = scriptname + table.sort(result) + for i=1,#result do + local scriptname = result[i] + local scriptbase = string.match(scriptname,".*mtx%-([^%-]-)%.lua") + if scriptbase then + local data = io.loaddata(scriptname) + local banner, version = string.match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)") + if banner then + valid[#valid+1] = { scriptbase, version, banner } + end end end if #valid > 0 then - logs.simple("known scripts: %s",table.concat(valid,", ")) + logs.reportbanner() + logs.reportline() + logs.simple("no script name given, known scripts:") + logs.simple() + for k=1,#valid do + local v = valid[k] + logs.simple("%-12s %4s %s",v[1],v[2],v[3]) + end end + else + logs.simple("no script name given") end 
else filename = file.addsuffix(filename,"lua") @@ -10358,6 +12465,12 @@ function runners.execute_ctx_script(filename,arguments) end end +function runners.prefixes() + logs.reportbanner() + logs.reportline() + logs.simple(table.concat(resolvers.allprefixes(true)," ")) +end + function runners.timedrun(filename) -- just for me if filename and filename ~= "" then runners.timed(function() os.execute(filename) end) @@ -10385,7 +12498,9 @@ instance.lsrmode = environment.argument("lsr") or false -- maybe the unset has to go to this level -if environment.argument("usekpse") or environment.argument("forcekpse") then +local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))] + +if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then os.setenv("engine","") os.setenv("progname","") @@ -10420,7 +12535,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") then return (kpse_initialized():show_path(name)) or "" end - elseif environment.argument("usekpse") then + elseif environment.argument("usekpse") or is_mkii_stub then resolvers.load() @@ -10449,7 +12564,6 @@ else end - if environment.argument("selfmerge") then -- embed used libraries utils.merger.selfmerge(own.name,own.libs,own.list) @@ -10462,9 +12576,14 @@ elseif environment.argument("selfupdate") then elseif environment.argument("ctxlua") or environment.argument("internal") then -- run a script by loading it (using libs) ok = runners.execute_script(filename,true) -elseif environment.argument("script") or environment.argument("s") then +elseif environment.argument("script") or environment.argument("scripts") then -- run a script by loading it (using libs), pass args - ok = runners.execute_ctx_script(filename,after) + if is_mkii_stub then + -- execute mkii script + ok = runners.execute_script(filename,false,true) + else + ok = runners.execute_ctx_script(filename) + end elseif environment.argument("execute") then -- execute script ok = runners.execute_script(filename) @@ -10491,6 +12610,8 @@ elseif environment.argument("locate") then elseif environment.argument("platform")then -- locate platform runners.locate_platform() +elseif environment.argument("prefixes") then + runners.prefixes() elseif environment.argument("timedrun") then -- locate platform runners.timedrun(filename) @@ -10499,8 +12620,14 @@ elseif environment.argument("help") or filename=='help' or filename == "" then -- execute script elseif filename:find("^bin:") then ok = runners.execute_program(filename) +elseif is_mkii_stub then + -- execute mkii script + ok = runners.execute_script(filename,false,true) else - ok = runners.execute_script(filename) + ok = runners.execute_ctx_script(filename) + if not ok then + ok = runners.execute_script(filename) + end end if os.platform == "unix" then diff --git a/Master/texmf-dist/scripts/context/lua/mtxrun.rme b/Master/texmf-dist/scripts/context/lua/mtxrun.rme index 9cb56486aba..9850e389d74 100644 --- a/Master/texmf-dist/scripts/context/lua/mtxrun.rme +++ b/Master/texmf-dist/scripts/context/lua/mtxrun.rme @@ -1,3 +1,18 @@ -On MSWindows the mtxrun.lua script is called -with mtxrun.cmd. On Unix you can either rename -mtxrun.lua to mtxrun, or use a symlink. +On MSWindows the mtxrun.lua script is called with +mtxrun.exe. On Unix you can either rename mtxrun.lua +to mtxrun, or use a symlink. 
+ +You can create additional stubs, like + +copy mtxrun.exe luatools.exe +copy mtxrun.exe texexec.exe +copy mtxrun.exe context.exe +copy mtxrun.exe mtx-server.exe + +The mtxrun.exe program is rather dump and only +intercepts mtxrun, luatools and texmfstart (for +old times sake) and passes the buck to mtxrun.lua +which happens to know enough of mkii to deal +with kpse based lookups and therefore acts like +texmfstart but when used with mkiv it behaves +more clever and looks for more. diff --git a/Master/texmf-dist/scripts/context/lua/scite-ctx.lua b/Master/texmf-dist/scripts/context/lua/scite-ctx.lua deleted file mode 100644 index 1b832928909..00000000000 --- a/Master/texmf-dist/scripts/context/lua/scite-ctx.lua +++ /dev/null @@ -1,843 +0,0 @@ --- version : 1.0.0 - 07/2005 (2008: lua 5.1) --- author : Hans Hagen - PRAGMA ADE - www.pragma-ade.com --- copyright : public domain or whatever suits --- remark : part of the context distribution, my first lua code - --- todo: name space for local functions - --- loading: scite-ctx.properties - --- # environment variable --- # --- # CTXSPELLPATH=t:/spell --- # --- # auto language detection --- # --- # % version =1.0 language=uk --- # - --- ext.lua.startup.script=$(SciteDefaultHome)/scite-ctx.lua --- --- # extension.$(file.patterns.context)=scite-ctx.lua --- # extension.$(file.patterns.example)=scite-ctx.lua --- --- # ext.lua.reset=1 --- # ext.lua.auto.reload=1 --- # ext.lua.startup.script=t:/lua/scite-ctx.lua --- --- ctx.menulist.default=\ --- wrap=wrap_text|\ --- unwrap=unwrap_text|\ --- sort=sort_text|\ --- document=document_text|\ --- quote=quote_text|\ --- compound=compound_text|\ --- check=check_text --- --- ctx.spellcheck.language=auto --- ctx.spellcheck.wordsize=4 --- ctx.spellcheck.wordpath=ENV(CTXSPELLPATH) --- --- ctx.spellcheck.wordfile.all=spell-uk.txt,spell-nl.txt --- --- ctx.spellcheck.wordfile.uk=spell-uk.txt --- ctx.spellcheck.wordfile.nl=spell-nl.txt --- ctx.spellcheck.wordsize.uk=4 --- ctx.spellcheck.wordsize.nl=4 --- --- command.name.21.*=CTX Action List --- command.subsystem.21.*=3 --- command.21.*=show_menu $(ctx.menulist.default) --- command.groupundo.21.*=yes --- command.shortcut.21.*=Shift+F11 --- --- command.name.22.*=CTX Check Text --- command.subsystem.22.*=3 --- command.22.*=check_text --- command.groupundo.22.*=yes --- command.shortcut.22.*=Ctrl+L --- --- command.name.23.*=CTX Wrap Text --- command.subsystem.23.*=3 --- command.23.*=wrap_text --- command.groupundo.23.*=yes --- command.shortcut.23.*=Ctrl+M --- --- # command.21.*=check_text --- # command.21.*=dofile e:\context\lua\scite-ctx.lua - --- generic functions - -local crlf = "\n" - -function traceln(str) - trace(str .. 
crlf) - io.flush() -end - -function table.found(tab, str) - local l, r, p - if #str == 0 then - return false - else - l, r = 1, #tab - while l <= r do - p = math.floor((l+r)/2) - if str < tab[p] then - r = p - 1 - elseif str > tab[p] then - l = p + 1 - else - return true - end - end - return false - end -end - -function string:grab(delimiter) - local list = {} - for snippet in self:gmatch(delimiter) do - list[#list+1] = snippet - end - return list -end - -function string:is_empty() - return not self:find("%S") -end - -function string:expand() - return (self:gsub("ENV%((%w+)%)", os.envvar)) -end - -function string:strip() - return (self:gsub("^%s*(.-)%s*$", "%1")) -end - -do - - local lower, gsub, sub = string.lower, string.gsub, string.sub - - function table.alphasort(list,i) - if i and i > 0 then - local function alphacmp(a,b) - return lower(gsub(sub(a,i),'0',' ')) < lower(gsub(sub(b,i),'0',' ')) - end - table.sort(list,alphacmp) - else - local function alphacmp(a,b) - return a:lower() < b:lower() - end - table.sort(list,alphacmp) - end - end - -end - -function io.exists(filename) - local ok, result, message = pcall(io.open,filename) - if result then - io.close(result) - return true - else - return false - end -end - -function os.envvar(str) - local s = os.getenv(str) - if s ~= '' then - return s - end - s = os.getenv(str:upper()) - if s ~= '' then - return s - end - s = os.getenv(str:lower()) - if s ~= '' then - return s - end -end - --- support functions, maybe editor namespace - --- function column_of_position(position) --- local line = editor:LineFromPosition(position) --- local oldposition = editor.CurrentPos --- local column = 0 --- editor:GotoPos(position) --- while editor.CurrentPos ~= 0 and line == editor:LineFromPosition(editor.CurrentPos) do --- editor:CharLeft() --- column = column + 1 --- end --- editor:GotoPos(oldposition) --- if line > 0 then --- return column -1 --- else --- return column --- end --- end - --- function line_of_position(position) --- return editor:LineFromPosition(position) --- end - -function extend_to_start() - local selectionstart = editor.SelectionStart - local selectionend = editor.SelectionEnd - local line = editor:LineFromPosition(selectionstart) - if line > 0 then - while line == editor:LineFromPosition(selectionstart-1) do - selectionstart = selectionstart - 1 - editor:SetSel(selectionstart,selectionend) - end - else - selectionstart = 0 - end - editor:SetSel(selectionstart,selectionend) - return selectionstart -end - -function extend_to_end() -- editor:LineEndExtend() does not work - local selectionstart = editor.SelectionStart - local selectionend = editor.SelectionEnd - local line = editor:LineFromPosition(selectionend) - while line == editor:LineFromPosition(selectionend+1) do - selectionend = selectionend + 1 - editor:SetSel(selectionstart,selectionend) - end - editor:SetSel(selectionstart,selectionend) - return selectionend -end - -function getfiletype() - local firstline = editor:GetLine(0) - if editor.Lexer == SCLEX_TEX then - return 'tex' - elseif editor.Lexer == SCLEX_XML then - return 'xml' - elseif firstline:find("^%%") then - return 'tex' - elseif firstline:find("^<%?xml") then - return 'xml' - else - return 'unknown' - end -end - --- inspired by LuaExt's scite_Files - -function get_dir_list(mask) - local f - if props['PLAT_GTK'] and props['PLAT_GTK'] ~= "" then - f = io.popen('ls -1 ' .. mask) - else - mask = mask:gsub('/','\\') - local tmpfile = 'scite-ctx.tmp' - local cmd = 'dir /b "' .. mask .. '" > ' .. 
tmpfile - os.execute(cmd) - f = io.open(tmpfile) - end - local files = {} - if not f then -- path check added - return files - end - for line in f:lines() do - files[#files+1] = line - end - f:close() - return files -end - --- banner - -do - - print("loading scite-ctx.lua definition file\n") - print("- see scite-ctx.properties for configuring info\n") - print("- ctx.spellcheck.wordpath set to " .. props['ctx.spellcheck.wordpath']) - if (props['ctx.spellcheck.wordpath']:lower()):find("ctxspellpath") then - if os.getenv('ctxspellpath') then - print("- ctxspellpath set to " .. os.getenv('CTXSPELLPATH')) - else - print("- 'ctxspellpath is not set") - end - print("- ctx.spellcheck.wordpath expands to " .. string.expand(props['ctx.spellcheck.wordpath'])) - end - print("\n- ctx.wraptext.length is set to " .. props['ctx.wraptext.length']) - if props['ctx.helpinfo'] ~= '' then - print("\n- key bindings:\n") - print(((string.strip(props['ctx.helpinfo'])):gsub("%s*\|%s*","\n"))) - end - print("\n- recognized first lines:\n") - print("xml ", 2) - -function wrap_text() - - -- We always go to the end of a line, so in fact some of - -- the variables set next are not needed. - - local length = props["ctx.wraptext.length"] - - if length == '' then length = 80 else length = tonumber(length) end - - local startposition = editor.SelectionStart - local endposition = editor.SelectionEnd - - if startposition == endposition then return end - - editor:LineEndExtend() - - startposition = editor.SelectionStart - endposition = editor.SelectionEnd - - -- local startline = line_of_position(startposition) - -- local endline = line_of_position(endposition) - -- local startcolumn = column_of_position(startposition) - -- local endcolumn = column_of_position(endposition) - -- - -- editor:SetSel(startposition,endposition) - - local startline = props['SelectionStartLine'] - local endline = props['SelectionEndLine'] - local startcolumn = props['SelectionStartColumn'] - 1 - local endcolumn = props['SelectionEndColumn'] - 1 - - local replacement = { } - local templine = '' - local indentation = string.rep(' ',startcolumn) - local selection = editor:GetSelText() - - selection = selection:gsub("[\n\r][\n\r]","\n") - selection = selection:gsub("\n\n+",' ' .. magicstring .. ' ') - selection = selection:gsub("^%s",'') - - for snippet in selection:gmatch("%S+") do - if snippet == magicstring then - replacement[#replacement+1] = templine - replacement[#replacement+1] = "" - templine = '' - elseif #templine + #snippet > length then - replacement[#replacement+1] = templine - templine = indentation .. snippet - elseif #templine == 0 then - templine = indentation .. snippet - else - templine = templine .. ' ' .. snippet - end - end - - replacement[#replacement+1] = templine - replacement[1] = replacement[1]:gsub("^%s+",'') - - if endcolumn == 0 then - replacement[#replacement+1] = "" - end - - editor:ReplaceSel(table.concat(replacement,"\n")) - -end - -function unwrap_text() - - local startposition = editor.SelectionStart - local endposition = editor.SelectionEnd - - if startposition == endposition then return end - - editor:HomeExtend() - editor:LineEndExtend() - - startposition = editor.SelectionStart - endposition = editor.SelectionEnd - - local magicstring = string.rep("", 2) - local selection = string.gsub(editor:GetSelText(),"[\n\r][\n\r]+", ' ' .. magicstring .. ' ') - local replacement = '' - - for snippet in selection:gmatch("%S+") do - if snippet == magicstring then - replacement = replacement .. 
"\n" - else - replacement = replacement .. snippet .. "\n" - end - end - - if endcolumn == 0 then replacement = replacement .. "\n" end - - editor:ReplaceSel(replacement) - -end - -function sort_text() - - local startposition = editor.SelectionStart - local endposition = editor.SelectionEnd - - if startposition == endposition then return end - - -- local startcolumn = column_of_position(startposition) - -- local endcolumn = column_of_position(endposition) - -- - -- editor:SetSel(startposition,endposition) - - local startline = props['SelectionStartLine'] - local endline = props['SelectionEndLine'] - local startcolumn = props['SelectionStartColumn'] - 1 - local endcolumn = props['SelectionEndColumn'] - 1 - - startposition = extend_to_start() - endposition = extend_to_end() - - local selection = string.gsub(editor:GetSelText(), "%s*$", '') - - list = string.grab(selection,"[^\n\r]+") - table.alphasort(list, startcolumn) - local replacement = table.concat(list, "\n") - - editor:GotoPos(startposition) - editor:SetSel(startposition,endposition) - - if endcolumn == 0 then replacement = replacement .. "\n" end - - editor:ReplaceSel(replacement) - -end - -function document_text() - - local startposition = editor.SelectionStart - local endposition = editor.SelectionEnd - - if startposition == endposition then return end - - startposition = extend_to_start() - endposition = extend_to_end() - - editor:SetSel(startposition,endposition) - - local filetype = getfiletype() - - local replacement = '' - for i = editor:LineFromPosition(startposition), editor:LineFromPosition(endposition) do - local str = editor:GetLine(i) - if filetype == 'xml' then - if str:find("^<%!%-%- .* %-%->%s*$") then - replacement = replacement .. str:gsub("^<%!%-%- (.*) %-%->(%s*)$","%1\n") - elseif not str:is_empty() then - replacement = replacement .. '\n" - else - replacement = replacement .. str - end - else - if str:find("^%%D%s+$") then - replacement = replacement .. "\n" - elseif str:find("^%%D ") then - replacement = replacement .. str:gsub("^%%D ",'') - else - replacement = replacement .. '%D ' .. str - end - end - end - - editor:ReplaceSel(replacement:gsub("[\n\r]$",'')) - -end - -function quote_text() - - local filetype, leftquotation, rightquotation = getfiletype(), '', '' - - if filetype == 'xml' then - leftquotation, rightquotation = "", "" - leftquote, rightquote = "", "" - else - leftquotation, rightquotation = "\\quotation {", "}" - leftquote, rightquote = "\\quote {", "}" - end - - local replacement = editor:GetSelText() - replacement = replacement.gsub("\`\`(.-)\'\'", leftquotation .. "%1" .. rightquotation) - replacement = replacement.gsub("\"(.-)\"", leftquotation .. "%1" .. rightquotation) - replacement = replacement.gsub("\`(.-)\'", leftquote .. "%1" .. rightquote ) - replacement = replacement.gsub("\'(.-)\'", leftquote .. "%1" .. 
rightquote ) - editor:ReplaceSel(replacement) - -end - -function compound_text() - - local filetype = getfiletype() - - if filetype == 'xml' then - editor:ReplaceSel(string.gsub(editor:GetSelText(),"(>[^<%-][^<%-]+)([-\/])(%w%w+)","%1%3")) - else - editor:ReplaceSel(string.gsub(editor:GetSelText(),"([^\|])([-\/]+)([^\|])","%1|%2|%3")) - end - -end - --- written while listening to Alanis Morissette's acoustic --- Jagged Little Pill and Tori Amos' Beekeeper after --- reinstalling on my good old ATH-7 - -local language = props["ctx.spellcheck.language"] -local wordsize = props["ctx.spellcheck.wordsize"] -local wordpath = props["ctx.spellcheck.wordpath"] - -if language == '' then language = 'uk' end -if wordsize == '' then wordsize = 4 else wordsize = tonumber(wordsize) end - -local wordfile = "" -local wordlist = {} -local worddone = 0 - --- we use wordlist as a hash so that we can add entries without the --- need to sort and also use a fast (built in) search - --- function kpsewhich_file(filename,filetype,progname) --- local progflag, typeflag = '', '' --- local tempname = os.tmpname() --- if progname then --- progflag = " --progname=" .. progname .. " " --- end --- if filetype then --- typeflag = " --format=" .. filetype .. " " --- end --- local command = "kpsewhich" .. progflag .. typeflag .. " " .. filename .. " > " .. tempname --- os.execute(command) --- for line in io.lines(tempname) do --- return string.gsub(line, "\s*$", '') --- end --- end - -function check_text() - - local dlanguage = props["ctx.spellcheck.language"] - local dwordsize = props["ctx.spellcheck.wordsize"] - local dwordpath = props["ctx.spellcheck.wordpath"] - - if dlanguage ~= '' then dlanguage = tostring(language) end - if dwordsize ~= '' then dwordsize = tonumber(wordsize) end - - local firstline, skipfirst = editor:GetLine(0), false - local filetype, wordskip, wordgood = getfiletype(), '', '' - - if filetype == 'tex' then - wordskip = "\\" - elseif filetype == 'xml' then - wordskip = "<" - wordgood = ">" - end - - if props["ctx.spellcheck.language"] == 'auto' then - if filetype == 'tex' then - -- % version =1.0 language=uk - firstline = firstline:gsub("^%%%s*",'') - firstline = firstline:gsub("%s*$",'') - for key, val in firstline:gmatch("(%w+)=(%w+)") do - if key == "language" then - language = val - traceln("auto document language " .. "'" .. language .. "' (tex)") - end - end - skipfirst = true - elseif filetype == 'xml' then - -- - firstline = firstline:gsub("^%<%?xml%s*", '') - firstline = firstline:gsub("%s*%?%>%s*$", '') - for key, val in firstline:gmatch("(%w+)=[\"\'](.-)[\"\']") do - if key == "language" then - language = val - traceln("auto document language " .. "'" .. language .. "' (xml)") - end - end - skipfirst = true - end - end - - local fname = props["ctx.spellcheck.wordfile." .. language] - local fsize = props["ctx.spellcheck.wordsize." .. language] - - if fsize ~= '' then wordsize = tonumber(fsize) end - - if fname ~= '' and fname ~= wordfile then - wordfile, worddone, wordlist = fname, 0, {} - for filename in wordfile:gmatch("[^%,]+") do - if wordpath ~= '' then - filename = string.expand(wordpath) .. '/' .. filename - end - if io.exists(filename) then - traceln("loading " .. filename) - for line in io.lines(filename) do - if not line:find("^[\%\#\-]") then - str = line:gsub("%s*$", '') - rawset(wordlist,str,true) - worddone = worddone + 1 - end - end - else - traceln("unknown file '" .. filename .."'") - end - end - traceln(worddone .. 
" words loaded") - end - - reset_text() - - if worddone == 0 then - traceln("no (valid) language or wordfile specified") - else - traceln("start checking") - if wordskip ~= '' then - traceln("ignoring " .. wordskip .. "..." .. wordgood) - end - local i, j, lastpos, startpos, endpos, snippet, len, first = 0, 0, -1, 0, 0, '', 0, 0 - local ok, skip, ch = false, false, '' - if skipfirst then first = #firstline end - for k = first, editor.TextLength do - ch = editor:textrange(k,k+1) - if wordgood ~= '' and ch == wordgood then - skip = false - elseif ch == wordskip then - skip = true - end - if ch:find("%w") and not ch:find("%d") then - if not skip then - if ok then - endpos = k - else - startpos = k - endpos = k - ok = true - end - end - elseif ok and not skip then - len = endpos - startpos + 1 - if len >= wordsize then - snippet = editor:textrange(startpos,endpos+1) - i = i + 1 - if wordlist[snippet] or wordlist[snippet:lower()] then -- table.found(wordlist,snippet) - j = j + 1 - else - editor:StartStyling(startpos,INDICS_MASK) - editor:SetStyling(len,INDIC2_MASK) -- INDIC0_MASK+2 - end - end - ok = false - elseif wordgood == '' then - skip = (ch == wordskip) - end - end - traceln(i .. " words checked, " .. (i-j) .. " errors") - end - -end - -function reset_text() - editor:StartStyling(0,INDICS_MASK) - editor:SetStyling(editor.TextLength,INDIC_PLAIN) -end - --- menu - -local menuactions = {} -local menufunctions = {} - -function UserListShow(menutrigger, menulist) - local menuentries = {} - local list = string.grab(menulist,"[^%|]+") - menuactions = {} - for i=1, #list do - if list[i] ~= '' then - for key, val in list[i]:gmatch("%s*(.+)=(.+)%s*") do - menuentries[#menuentries+1] = key - menuactions[key] = val - end - end - end - local menustring = table.concat(menuentries,'|') - if menustring == "" then - traceln("There are no templates defined for this file type.") - else - editor.AutoCSeparator = string.byte('|') - editor:UserListShow(menutrigger,menustring) - editor.AutoCSeparator = string.byte(' ') - end -end - -function OnUserListSelection(trigger,choice) - if menufunctions[trigger] and menuactions[choice] then - return menufunctions[trigger](menuactions[choice]) - else - return false - end -end - --- main menu - -local menutrigger = 12 - -function show_menu(menulist) - UserListShow(menutrigger, menulist) -end - -function process_menu(action) - if not action:find("%(%)$") then - assert(loadstring(action .. "()"))() - else - assert(loadstring(action))() - end -end - -menufunctions[12] = process_menu - --- templates - -local templatetrigger = 13 - -local ctx_template_paths = { "./ctx-templates", "../ctx-templates", "../../ctx-templates" } -local ctx_auto_templates = false -local ctx_template_list = "" - -local ctx_path_list = {} -local ctx_path_done = {} -local ctx_path_name = {} - -function ctx_list_loaded(path) - return ctx_path_list[path] and #ctx_path_list[path] > 0 -end - -function insert_template(templatelist) - if props["ctx.template.scan"] == "yes" then - local path = props["FileDir"] - local rescan = props["ctx.template.rescan"] == "yes" - local suffix = props["ctx.template.suffix." .. props["FileExt"]] -- alas, no suffix expansion here - local current = path .. "+" .. props["FileExt"] - if rescan then - print("re-scanning enabled") - end - ctx_template_list = "" - if not ctx_path_done[path] or rescan then - local pattern = "*.*" - for i, pathname in ipairs(ctx_template_paths) do - print("scanning " .. path:gsub("\\","/") .. "/" .. 
pathname) - ctx_path_name[path] = pathname - ctx_path_list[path] = get_dir_list(pathname .. "/" .. pattern) - if ctx_list_loaded(path) then - print("finished locating template files") - break - end - end - if ctx_list_loaded(path) then - print(#ctx_path_list[path] .. " template files found") - else - print("no template files found") - end - end - if ctx_list_loaded(path) then - ctx_template_list = "" - local pattern = "%." .. suffix .. "$" - local n = 0 - for j, filename in ipairs(ctx_path_list[path]) do - if filename:find(pattern) then - n = n + 1 - local menuname = filename:gsub("%..-$","") - if ctx_template_list ~= "" then - ctx_template_list = ctx_template_list .. "|" - end - ctx_template_list = ctx_template_list .. menuname .. "=" .. ctx_path_name[path] .. "/" .. filename - end - end - if not ctx_path_done[path] then - print(n .. " suitable template files found") - end - end - ctx_path_done[path] = true - if ctx_template_list == "" then - ctx_auto_templates = false - else - ctx_auto_templates = true - templatelist = ctx_template_list - end - else - ctx_auto_templates = false - end - if templatelist ~= "" then - UserListShow(templatetrigger, templatelist) - end -end - - --- ctx.template.[whatever].[filetype] --- ctx.template.[whatever].data.[filetype] --- ctx.template.[whatever].file.[filetype] --- ctx.template.[whatever].list.[filetype] - -function process_template_one(action) - local text = nil - if ctx_auto_templates then - local f = io.open(action,"r") - if f then - text = string.gsub(f:read("*all"),"\n$","") - f:close() - else - print("unable to auto load template file " .. text) - text = nil - end - end - if not text or text == "" then - text = props["ctx.template." .. action .. ".file"] - if not text or text == "" then - text = props["ctx.template." .. action .. ".data"] - if not text or text == "" then - text = props["ctx.template." .. action] - end - else - local f = io.open(text,"r") - if f then - text = string.gsub(f:read("*all"),"\n$","") - f:close() - else - print("unable to load template file " .. text) - text = nil - end - end - end - if text then - text = text:gsub("\\n","\n") - local pos = text:find("%?") - text = text:gsub("%?","") - editor:insert(editor.CurrentPos,text) - if pos then - editor.CurrentPos = editor.CurrentPos + pos - 1 - editor.SelectionStart = editor.CurrentPos - editor.SelectionEnd = editor.CurrentPos - editor:GotoPos(editor.CurrentPos) - end - end -end - -menufunctions[13] = process_template_one -menufunctions[14] = process_template_two - --- command.name.26.*=Open Logfile --- command.subsystem.26.*=3 --- command.26.*=open_log --- command.save.before.26.*=2 --- command.groupundo.26.*=yes --- command.shortcut.26.*=Ctrl+E - -function open_log() - scite.Open(props['FileName'] .. ".log") -end diff --git a/Master/texmf-dist/scripts/context/lua/x-ldx.lua b/Master/texmf-dist/scripts/context/lua/x-ldx.lua index af5c9c0c87e..e0f21d68cd9 100644 --- a/Master/texmf-dist/scripts/context/lua/x-ldx.lua +++ b/Master/texmf-dist/scripts/context/lua/x-ldx.lua @@ -6,7 +6,8 @@ itself serves as an example of using in combination with . I will rewrite this using lpeg once I have the time to study that nice new -subsystem. +subsystem. On the other hand, we cannot expect proper +ad for educational purposed the syntax migh be wrong. 
--ldx]]-- banner = "version 1.0.1 - 2007+ - PRAGMA ADE / CONTEXT" @@ -126,79 +127,8 @@ ldx.make_index = true function ldx.enhance(data) -- i need to use lpeg and then we can properly autoindent -) local e = ldx.escape - for _,v in pairs(data) do - if v.code then - local dqs, sqs, com, cmt, cod = { }, { }, { }, { }, e(v.code) - cod = cod:gsub('\\"', "##d##") - cod = cod:gsub("\\'", "##s##") - cod = cod:gsub("%-%-%[%[.-%]%]%-%-", function(s) - cmt[#cmt+1] = s - return "[[[[".. #cmt .."]]]]" - end) - cod = cod:gsub("%-%-([^\n]*)", function(s) - com[#com+1] = s - return "[[".. #com .."]]" - end) - cod = cod:gsub("(%b\"\")", function(s) - dqs[#dqs+1] = s:sub(2,-2) or "" - return "<<<<".. #dqs ..">>>>" - end) - cod = cod:gsub("(%b\'\')", function(s) - sqs[#sqs+1] = s:sub(2,-2) or "" - return "<<".. #sqs ..">>" - end) - cod = cod:gsub("(%a+)",function(key) - local class = ldx.keywords.reserved[key] - if class then - return "" .. key .. "" - else - return key - end - end) - cod = cod:gsub("<<<<(%d+)>>>>", function(s) - return "" .. dqs[tonumber(s)] .. "" - end) - cod = cod:gsub("<<(%d+)>>", function(s) - return "" .. sqs[tonumber(s)] .. "" - end) - cod = cod:gsub("%[%[%[%[(%d+)%]%]%]%]", function(s) - return cmt[tonumber(s)] - end) - cod = cod:gsub("%[%[(%d+)%]%]", function(s) - return "" .. com[tonumber(s)] .. "" - end) - cod = cod:gsub("##d##", "\\\"") - cod = cod:gsub("##s##", "\\\'") - if ldx.make_index then - local lines = cod:split("\n") - local f = "(function)%s+([%w%.]+)%s*%(" - for k,v in pairs(lines) do - -- functies - v = v:gsub(f,function(key, str) - return "" .. str .. "(" - end) - -- variables - v = v:gsub("^([%w][%w%,%s]-)(=[^=])",function(str, rest) - local t = string.split(str, ",%s*") - for k,v in pairs(t) do - t[k] = "" .. v .. "" - end - return table.join(t,", ") .. rest - end) - -- so far - lines[k] = v - end - v.code = table.concat(lines,"\n") - else - v.code = cod - end - end - end -end - -function ldx.enhance(data) -- i need to use lpeg and then we can properly autoindent -) - local e = ldx.escape - for _,v in pairs(data) do + for k=1,#data do + local v = data[k] if v.code then local dqs, sqs, com, cmt, cod = { }, { }, { }, { }, e(v.code) cod = cod:gsub('\\"', "##d##") @@ -244,7 +174,8 @@ function ldx.enhance(data) -- i need to use lpeg and then we can properly autoin if ldx.make_index then local lines = cod:split("\n") local f = "(function)%s+([%w%.]+)%s*%(" - for k,v in pairs(lines) do + for k=1,#lines do + local v = lines[k] -- functies v = v:gsub(f,function(key, str) return "" .. str .. "(" @@ -252,8 +183,8 @@ function ldx.enhance(data) -- i need to use lpeg and then we can properly autoin -- variables v = v:gsub("^([%w][%w%,%s]-)(=[^=])",function(str, rest) local t = string.split(str, ",%s*") - for k,v in pairs(t) do - t[k] = "" .. v .. "" + for k=1,#t do + t[k] = "" .. t[k] .. "" end return table.join(t,", ") .. rest end) @@ -276,14 +207,17 @@ and by calculating the indentation we also avoid space troubles. It also makes it possible to change the indentation afterwards. 
--ldx]]-- -function ldx.as_xml(data) +function ldx.as_xml(data) -- ldx: not needed local t, cmode = { }, false t[#t+1] = "\n" t[#t+1] = "\n\n" - for _,v in pairs(data) do -- ldx: not needed + for k=1,#data do + local v = data[k] if v.code and not v.code:is_empty() then t[#t+1] = "\n\n" - for k,v in pairs(v.code:split("\n")) do -- make this faster + local split = v.code:split("\n") + for k=1,#split do -- make this faster + local v = split[k] local a, b = v:find("^(%s+)") if v then v = v:gsub("[\n\r ]+$","") end if a and b then @@ -384,3 +318,5 @@ The main conversion call is: if arg and arg[1] then ldx.convert(arg[1],arg[2]) end + +--~ exit(1) diff --git a/Master/texmf-dist/scripts/context/perl/mptopdf.pl b/Master/texmf-dist/scripts/context/perl/mptopdf.pl index a6b946baa4d..41d1ae1f7ef 100644 --- a/Master/texmf-dist/scripts/context/perl/mptopdf.pl +++ b/Master/texmf-dist/scripts/context/perl/mptopdf.pl @@ -104,7 +104,7 @@ if (($pattern eq '')||($Help)) { my $error = system ($runner) ; if ($error) { print "\n$program : error while processing mp file\n" ; - exit ; + exit 1 ; } else { $pattern =~ s/\.mp$//io ; @files = glob "$pattern.*" ; @@ -131,7 +131,11 @@ foreach my $file (@files) { } else { $command = "$command \\\\relax $file" ; } - system($command) ; + my $error = system($command) ; + if ($error) { + print "\n$program : error while processing tex file\n" ; + exit 1 ; + } my $pdfsrc = basename($_).".pdf"; rename ($pdfsrc, "$_-$1.pdf") ; if (-e $pdfsrc) { diff --git a/Master/texmf-dist/scripts/context/perl/texshow.pl b/Master/texmf-dist/scripts/context/perl/texshow.pl deleted file mode 100644 index 629c28f99ec..00000000000 --- a/Master/texmf-dist/scripts/context/perl/texshow.pl +++ /dev/null @@ -1,956 +0,0 @@ -eval '(exit $?0)' && eval 'exec perl -w -S $0 ${1+"$@"}' && eval 'exec perl -w -S $0 $argv:q' - if 0; - -#D \module -#D [ file=texshow.pl, -#D version=2006.08.04, -#D title=TeXShow, -#D subtitle=showing \CONTEXT\ commands, -#D author=Taco Hoekwater, -#D date=\currentdate, -#D copyright={Taco Hoekwater}] - -#D Early 1999 \TEXSHOW\ showed up in the \CONTEXT\ distribution. At that time -#D the user interface was described in files named \type {setup*.tex}. The -#D program used a stripped down version of these definition files, generated -#D by \CONTEXT\ itself. \TEXSHOW\ shows you the commands, their (optional) -#D arguments, as well as the parameters and their values. For some five years -#D there was no need to change \TEXSHOW. However, when a few years ago we -#D started providing an \XML\ variant of the user interface definitions, Taco -#D came up with \TEXSHOW||\XML. Because Patricks \CONTEXT\ garden and tools -#D like \CTXTOOLS\ also use the \XML\ definitions, it's time to drop the old -#D \TEX\ based definitions and move forward. From now on Taco's version is the -#D one to be used. 
-#D -#D Hans Hagen - Januari 2005 -#D -#D ChangeLog: -#D \startitemize -#D \item Add keyboard bindings for quitting the app: Ctrl-q,Ctrl-x,Alt-F4 (2006/07/19) -#D \item Support for define --resolve (2006/08/04) -#D \stopitemize - -use strict; -use Getopt::Long ; -use XML::Parser; -use Data::Dumper; -use Tk; -use Tk::ROText ; -use Config; -use Time::HiRes; - -$Getopt::Long::passthrough = 1 ; # no error message -$Getopt::Long::autoabbrev = 1 ; # partial switch accepted - -my $ShowHelp = 0; -my $Debug = 0; -my $Editmode = 0; -my $Interface = 'cont-en'; -my $current_command; -my $current_interface; -my $current_part; -my @setup_files; - -my %setups; -my %commes; -my %descrs; -my %examps; -my %trees; -my %positions; -my %locations; -my %crosslinks; - - -&GetOptions - ( "help" => \$ShowHelp , - "interface=s" => \$Interface , - "debug" => \$Debug, - "edit" => \$Editmode) ; - -print "\n"; - -show('TeXShow-XML 0.3 beta','Taco Hoekwater 2006',"/"); - -print "\n"; - -if ($ShowHelp) { - show('--help','print this help'); - show('--interface=lg','primary interface'); - show('--debug','print debugging info'); - show('string','show info about command \'string\''); - show('string lg','show info about \'string\' in language \'lg\''); - print "\n"; - exit 0; -} - -my $command = $ARGV[0] || ''; -my $interface = $ARGV[1] || ''; -if ($interface =~ /^[a-z][a-z]$/i) { - $Interface = 'cont-' . lc($interface); -} elsif ($interface && $command =~ /^[a-z][a-z]$/i) { - show('debug',"switching '$interface' and '$command'"); - $Interface = 'cont-' . lc($command); - $command = $interface; -} - -if ($command =~ s/^\\//) { - show('debug','removed initial command backslash'); -} - -show('interface', $Interface); -if ($command){ - show ('command', "\\$command") ; -} - -print "\n"; - -show('status','searching for setup files'); - -my $setup_path; -my ($mainwindow,$interfaceframe,$partframe,$leftframe,$rightframe,$buttonframe); -my ($request,$listbox,$textwindow,%interfacebuttons,%partbuttons); - -my ($textfont,$userfont,$buttonfont); - -my $Part; - -if (setups_found($Interface)) { - $current_interface = ''; - $current_command = ''; - $current_part = 'Command'; - show('status','loading setups') ; - load_setups($Interface) ; - show ('status','initializing display') ; - initialize_display(); - change_setup(); - show_command ($command); - $mainwindow->deiconify(); - show ('status','entering main loop') ; - MainLoop () ; - show ('status','closing down') ; -} else { - show ('error','no setup files found') ; -} -print "\n"; - -sub initialize_display { - my $dosish = ($Config{'osname'} =~ /dos|win/i) ; - my $default_size = $dosish ? 
9 : 12 ; - my $s_vertical = 30 ; - my $s_horizontal = 72 ; - my $c_horizontal = 24 ; - if (!$dosish) { - $textfont = "-adobe-courier-bold-r-normal--$default_size-120-75-75-m-70-iso8859-1" ; - $userfont = "-adobe-courier-bold-o-normal--$default_size-120-75-75-m-70-iso8859-1" ; - $buttonfont = "-adobe-helvetica-bold-r-normal--$default_size-120-75-75-p-69-iso8859-1"; - } else { - $textfont = "Courier $default_size " ; - $userfont = "Courier $default_size italic" ; - $buttonfont = "Helvetica $default_size bold " ; - } - $mainwindow = MainWindow -> new ( -title => 'ConTeXt commands' ) ; - $buttonframe = $mainwindow -> Frame () ; # buttons - $leftframe = $mainwindow -> Frame () ; # leftside - $rightframe = $mainwindow -> Frame(); - $request = $rightframe -> Entry (-font => $textfont, - -background => 'ivory1', - -width => $c_horizontal); - $listbox = $rightframe -> Scrolled ('Listbox', - -scrollbars => 'e', - -font => $textfont, - -width => $c_horizontal, - -selectbackground => 'gray', - -background => 'ivory1', - -selectmode => 'browse') ; - $textwindow = $leftframe -> Scrolled ('ROText', - -scrollbars => 'se', - -height => $s_vertical, - -width => $s_horizontal, - -wrap => 'none', - -background => 'ivory1', - -font => $textfont); - $interfaceframe = $leftframe -> Frame(); - $mainwindow -> withdraw() ; - $mainwindow -> resizable ('y', 'y') ; - foreach (@setup_files) { - $interfacebuttons{$_} = $buttonframe -> Radiobutton (-text => $_, - -value => $_, - -font => $buttonfont, - -selectcolor => 'ivory1', - -indicatoron => 0, - -command => \&change_setup, - -variable => \$Interface ); - - $interfacebuttons{$_} -> pack (-padx => '2p',-pady => '2p','-side' => 'left' ); - } - foreach (qw(Command Description Comments Examples)) { - $partbuttons{$_} = $interfaceframe -> Radiobutton (-text => $_, - -value => $_, - -font => $buttonfont, - -selectcolor => 'ivory1', - -indicatoron => 0, - -command => \&change_part, - -variable => \$Part ); - $partbuttons{$_} -> pack (-padx => '2p',-pady => '2p','-side' => 'left' ); - } - # global top - $buttonframe -> pack ( -side => 'top' , -fill => 'x' , -expand => 0 ) ; - # top in left - $interfaceframe -> pack ( -side => 'top' , -fill => 'x' , -expand => 0 ) ; - $textwindow -> pack ( -side => 'top' , -fill => 'both' , -expand => 1 ) ; - $leftframe -> pack ( -side => 'left' , -fill => 'both' , -expand => 1 ) ; - # right - $request -> pack ( -side => 'top' , -fill => 'x' ) ; - $listbox -> pack ( -side => 'bottom' , -fill => 'both' , -expand => 1 ) ; - $rightframe -> pack ( -side => 'right' , -fill => 'both' , -expand => 1 ) ; - $listbox -> bind ('', \&show_command ) ; - $listbox -> bind ('<1>' , \&show_command ) ; - $listbox -> bind ('' , \&show_command ) ; - $textwindow -> tag ('configure', 'user' , -font => $userfont ) ; - $textwindow -> tag ('configure', 'optional' , -font => $userfont ) ; - $textwindow -> tag ('configure', 'command' , -foreground => 'green3' ) ; - $textwindow -> tag ('configure', 'variable' , -font => $userfont ) ; - $textwindow -> tag ('configure', 'default' , -underline => 1 ) ; - $textwindow -> tag ('configure', 'symbol' , -foreground => 'blue3' ) ; - $textwindow -> tag ('configure', 'or' , -foreground => 'yellow3' ) ; - $textwindow -> tag ('configure', 'argument' , -foreground => 'red3' ) ; - $textwindow -> tag ('configure', 'par' , -lmargin1 => '4m' , - -wrap => 'word' , - -lmargin2 => '6m' ) ; - foreach my $chr ('a'..'z','A'..'Z') { - $mainwindow -> bind ( "", sub { insert_request(shift, $chr) } ); - } - $request -> bind ('', sub { 
handle_request() } ) ; - $mainwindow -> bind ( "", sub { insert_request(shift, "\\") } ) ; - $mainwindow -> bind ( "", sub { new_request() } ) ; - $mainwindow -> bind ( "", sub { delete_request() } ) ; - $mainwindow -> bind ( "", sub { prev_command() } ) ; - $mainwindow -> bind ( "", sub { next_command() } ) ; - $mainwindow -> bind ( "", sub { exit(0) } ) ; - $mainwindow -> bind ( "", sub { exit(0) } ) ; - $mainwindow -> bind ( "", sub { exit(0) } ) ; - $mainwindow -> bind ( "", sub { exit(0) } ) ; - $mainwindow -> bind ( "", sub { exit(0) } ) ; -} - -sub show { - my ($pre,$post,$sep) = @_; - unless ($pre eq 'debug' && !$Debug) { - $sep = ':' unless defined $sep; - print sprintf("%22s $sep %+s\n",$pre,$post); - } -} - -sub change_setup { - # switches to another setup file - if ($current_interface ne $Interface ) { - my $loc = 0; - if ($current_command) { - $loc = $positions{$Interface}{$current_command} || 0; - } - my @list = sort {lc $a cmp lc $b} keys %{$setups{$Interface}} ; - my $num = 0; - map { $locations{$Interface}{$_} = $num++; } @list; - $listbox -> delete ('0.0', 'end') ; - $listbox -> insert ('end', @list) ; - # try to switch to other command as well, here. - if ($current_command ne '') { - show_command($crosslinks{$Interface}[$loc] || ''); - } else { - $listbox -> selectionSet ('0.0', '0.0') ; - $listbox -> activate ('0.0') ; - } - } - $current_interface = $Interface; - $mainwindow -> focus ; -} - -sub change_part { - if ($Part ne $current_part) { - if($Part eq 'Command') { - show_command(); - } elsif ($Part eq 'Description') { - show_description(); - } elsif ($Part eq 'Comments') { - show_comments(); - } elsif ($Part eq 'Examples') { - show_examples(); - } - } - $current_part = $Part; -} - - -sub setups_found { - # find the setup files - my ($primary) = @_; - $setup_path = `kpsewhich --progname=context cont-en.xml` ; - chomp $setup_path; - show ('debug', "path = '$setup_path'"); - if ($setup_path) { - $setup_path =~ s/cont-en\.xml.*// ; - @setup_files = glob ("${setup_path}cont\-??.xml") ; # HH: pattern patched, too greedy - show ('debug', "globbed path into '@setup_files'"); - if (@setup_files) { - my $found = 0; - foreach (@setup_files) { - s/\.xml.*$//; - s/^.*?cont-/cont-/; - if ($_ eq $primary) { - $found = 1; - show ('debug', "found primary setup '$primary'"); - } else { - show ('debug', "found non-primary setup '$_'"); - } - } - if ($found) { - return 1; - } else { - show('error',"setup file for '$primary' not found, using 'cont-en'"); - $Interface = 'cont-en'; - return 1; - } - } else { - show('error',"setup file glob failed"); - } - } elsif ($!) { - show('error','kpsewhich not found'); - } else { - show('error','setup files not found'); - } - return 0; -} - -sub load_setup { - my ($path,$filename) = @_; - my $localdefs = {}; - unless (keys %{$setups{$filename}}) { - if (open(IN,"<${path}$filename.xml")) { - my $position = 0 ; - local $/ = ''; - while (my $data= ) { - next if $data =~ /\<\/cd:interface/; - if ($data =~ /\(.*?)!!sm) { - my $localdef = $2; - my $localval = $3; - $localdefs->{$localdef} = $localval; - } - } - # - if (keys %$localdefs) { - while ($data =~ //$localdefs->{$2}/ms; - } - } - $data =~ s/\s*\n\s*//g; - $data =~ /\/; - my $info = $1; - my ($name,$environment) = ('',''); - while ($info =~ s/^\s*(.*?)\s*=\s*(["'])(.*?)\2\s*//) { - my $a = $1; my $b = $3; - if ($a eq 'name') { - $name = $b; - } elsif ($a eq 'type') { - $environment = $b; - } - } - my $cmd = $name; - if ($environment) { - $cmd = "start" . 
$name; - } - $setups {$filename}{$cmd} = $data ; - $trees {$filename}{$cmd} = undef; - $positions {$filename}{$cmd} = ++$position ; - $crosslinks{$filename}[$position] = $cmd ; - } - close IN; - # now get explanations as well ... - my $explname = $filename; - $explname =~ s/cont-/expl-/; - my $extras = 0 ; - if (open(IN,"<${path}$explname.xml")) { - local $/ = ''; - while (my $data= ) { - if ($data =~ /\<\/cd:explanations/) { - next; - } - if ($data =~ /\/; - my $info = $1; - my ($name,$environment) = ('',''); - while ($info =~ s/^\s*(.*?)\s*=\s*(["'])(.*?)\2\s*//) { - my $a = $1; my $b = $3; - if ($a eq 'name') { - $name = $b; - } elsif ($a eq 'type') { - $environment = $b; - } - } - my $cmd = $name; - if ($environment) { - $cmd = "start" . $name; - } - my $comment = ''; - my $description = ''; - my @examples = (); - $data =~ /\(.*)\<\/cd:description\>/s and $description = $1; - $data =~ /\(.*)\<\/cd:comment\>/s and $comment = $1; - while ($data =~ s/\(.*?)\<\/cd:example\>//s) { - push @examples, $1; - } - if (length($comment) && $comment =~ /\S/) { - $commes {$filename}{$cmd} = $comment; - } - if (length($description) && $description =~ /\S/) { - $descrs {$filename}{$cmd} = $description; - } - my $testex = "@examples"; - if (length($testex) && $testex =~ /\S/) { - $examps {$filename}{$cmd} = [@examples]; - } - } - } - if ($extras) { - show('debug',"interface '$filename', $position\&$extras commands"); - } else { - show('debug',"interface '$filename', $position commands"); - } - } else { - show ('debug',"open() of ${path}$filename.xml failed"); - } - } - $Interface = $filename ; -} - -sub load_setups { - my ($primary) = @_; - # load all setup files, but default to $primary - my $t0 = [Time::HiRes::gettimeofday()]; - foreach my $setup (@setup_files) { - if ($setup ne $primary) { - load_setup ($setup_path,$setup); - show('status',"loading '$setup' took " .Time::HiRes::tv_interval($t0) . " seconds"); - $t0 = [Time::HiRes::gettimeofday()]; - }; - }; - load_setup ($setup_path,$primary); - show('status',"loading '$primary' took " .Time::HiRes::tv_interval($t0) . " seconds"); -} - -my @history = (); -my $current_history = 0; - -sub show_command { - my ($command,$nofix) = @_; - if (keys %{$setups{$Interface}}) { - my $key = ''; - if (defined $command && $command && - (defined $setups{$Interface}{$command} || - defined $setups{$Interface}{"start" . 
$command})) { - $key = $command; - my $whence =$locations{$Interface}{$command}; - $listbox -> selectionClear ('0.0','end') ; - $listbox -> selectionSet($whence,$whence); - $listbox -> activate($whence); - $listbox -> see($whence); - } else { - $listbox -> selectionSet('0.0','0.0') unless $listbox->curselection(); - $key = $listbox -> get($listbox->curselection()) ; - } - show('debug',"current command: $current_command"); - show('debug'," new command: $key"); - $current_command = $key ; - $textwindow -> delete ('1.0', 'end' ) ; - $partbuttons{"Command"}->select(); - $partbuttons{"Command"}->configure('-state' => 'normal'); - $partbuttons{"Description"}->configure('-state' => 'disabled'); - $partbuttons{"Comments"}->configure('-state' => 'disabled'); - $partbuttons{"Examples"}->configure('-state' => 'disabled'); - if (defined $commes{$Interface}{$key}) { - $partbuttons{"Comments"}->configure('-state' => 'normal'); - } - if (defined $descrs{$Interface}{$key}) { - $partbuttons{"Description"}->configure('-state' => 'normal'); - } - if (defined $examps{$Interface}{$key}) { - $partbuttons{"Examples"}->configure('-state' => 'normal'); - } - unless (defined $nofix && $nofix) { - push @history, $key; - $current_history = $#history; - } - do_update_command ($key) ; - $mainwindow -> update(); - $mainwindow -> focus() ; - } -} - -sub prev_command { - if ($current_history > 0) { - $current_history--; - show_command($history[$current_history],1); - } -} - -sub next_command { - unless ($current_history == $#history) { - $current_history++; - show_command($history[$current_history],1); - } -} - -sub show_description { - $textwindow -> delete ('1.0', 'end' ) ; - if (defined $descrs{$current_interface}{$current_command}) { - $textwindow-> insert ('end',$descrs{$current_interface}{$current_command}); - } - $mainwindow -> update(); - $mainwindow -> focus() ; -} - -sub show_comments { - $textwindow -> delete ('1.0', 'end' ) ; - if (defined $commes{$current_interface}{$current_command}) { - $textwindow-> insert ('end',$commes{$current_interface}{$current_command}); - } - $mainwindow -> update(); - $mainwindow -> focus() ; -} - - -sub show_examples { - $textwindow -> delete ('1.0', 'end' ) ; - if (defined $examps{$current_interface}{$current_command}) { - $textwindow-> insert ('end',join("\n\n",@{$examps{$current_interface}{$current_command}})); - } - $mainwindow -> update(); - $mainwindow -> focus() ; -} - - - - -sub has_attr { - my ($elem,$att,$val) = @_; - return 1 if (attribute($elem,$att) eq $val); - return 0; -} - - -sub view_post { - my ($stuff,$extra) = @_; - $extra = '' unless defined $extra; - $stuff =~ /^(.)(.*?)(.)$/; - my ($l,$c,$r) = ($1,$2,$3); - if ($l eq '[' || $l eq '(') { - return ($l,['symbol','par',$extra],$c,['par',$extra],$r,['symbol','par',$extra],"\n",'par'); - } else { - return ($l,['argument','par',$extra],$c,['par',$extra],$r,['argument','par',$extra],"\n",'par'); - } -} - -sub view_pre { - my ($stuff) = @_; - $stuff =~ /^(.)(.*?)(.)$/; - my ($l,$c,$r) = ($1,$2,$3); - if ($l eq '[' || $l eq '(') { - return ($l,['symbol'],$c,'',$r,['symbol']); - } else { - return ($l,['argument'],$c,'',$r,['argument']); - } -} - -sub create_setup_arguments { - my $argx = shift; - my @predisp = (); - my @postdisp = (); - foreach my $arg (children($argx)) { - if (name($arg) eq 'cd:keywords') { - # children are Constant* & Inherit? & Variable* - my @children = children($arg); - my $optional = (attribute($arg,'optional') eq 'yes' ? 
'optional' : ''); - if (@children){ - push @predisp,'[', ['symbol',$optional]; - if (has_attr($arg,'list', 'yes')) { - if (has_attr($arg,'interactive', 'exclusive')) { - push @predisp, '...', ''; - } else { - push @predisp, '..,...,..', ''; - } - } else { - push @predisp,'...', ''; - } - push @predisp,']', ['symbol',$optional]; - } - push @postdisp,'[', ['symbol','par',$optional]; - my $firsttrue = 1; - foreach my $kwd (@children) { - if ($firsttrue) { - $firsttrue = 0; - } else { - push @postdisp,', ', ['symbol','par']; - } - if (name($kwd) eq 'cd:constant' || - name($kwd) eq 'cd:variable') { - my $v = attribute($kwd,'type'); - my $def = ''; - my $var = ''; - $var = 'variable' if (name($kwd) eq 'cd:variable') ; - $def = 'default' if (has_attr($kwd,'default', 'yes')); - if ($v =~ /^cd:/) { - $v =~ s/^cd://; - $v .= "s" if (has_attr($arg,'list', 'yes')); - push @postdisp, $v, ['user',$def,'par',$var]; - } else { - push @postdisp, $v, [$def,'par',$var]; - } - } elsif (name($kwd) eq 'cd:inherit') { - my $v = attribute($kwd,'name'); - $textwindow -> tag ('configure', $v , -foreground => 'blue3',-underline => 1 ) ; - $textwindow -> tagBind($v,'',sub {show_command($v)} ); - push @postdisp,"see ","par", "$v", [$v,'par']; - } - } - push @postdisp,']', ['symbol','par',$optional]; - push @postdisp,"\n", 'par'; - } elsif (name($arg) eq 'cd:assignments') { - # children are Parameter* & Inherit? - my @children = children($arg); - my $optional = (attribute($arg,'optional') eq 'yes' ? 'optional' : ''); - if (@children) { - push @predisp,'[', ['symbol',$optional]; - if (has_attr($arg,'list', 'yes')) { - push @predisp, '..,..=..,..', ''; - } else { - push @predisp,'..=..', ''; - } - push @predisp,']', ['symbol',$optional]; - push @postdisp,'[', ['symbol','par',$optional]; - my $isfirst = 1; - foreach my $assn (@children) { - if ($isfirst) { - $isfirst = 0; - } else { - push @postdisp,",\n ", ['symbol','par']; - } - if (name($assn) eq 'cd:parameter') { - push @postdisp,attribute($assn,'name'), 'par'; - push @postdisp,'=', ['symbol','par']; - my $firstxtrue = 1; - foreach my $par (children($assn)) { - if ($firstxtrue) { - $firstxtrue = 0; - } else { - push @postdisp,'|', ['or','par']; - } - if (name($par) eq 'cd:constant' || name($par) eq 'cd:variable') { - my $var = ''; - $var = 'variable' if name($par) eq 'cd:variable'; - my $v = attribute($par,'type'); - if ($v =~ /^cd:/) { - $v =~ s/^cd://; - push @postdisp,$v, ['user','par',$var]; - } else { - push @postdisp,$v, ['par',$var]; - } - } - } - } elsif (name($assn) eq 'cd:inherit') { - my $v = attribute($assn,'name'); - $textwindow -> tag ('configure', $v , -foreground => 'blue3',-underline => 1 ) ; - $textwindow -> tagBind($v,'',sub {show_command($v)} ); - push @postdisp,"see ","par", "$v", [$v,'par']; - } - } - push @postdisp,"]", ['symbol','par',$optional], "\n", ''; - } - } elsif (name($arg) eq 'cd:content') { - push @predisp, view_pre('{...}'); - push @postdisp, view_post('{...}'); - } elsif (name($arg) eq 'cd:triplet') { - if (has_attr($arg,'list','yes')) { - push @predisp, view_pre('[x:y:z=,..]'); - push @postdisp,view_post('[x:y:z=,..]'); - } else { - push @predisp, view_pre('[x:y:z=]'); - push @postdisp,view_post('[x:y:z=]'); - } - } elsif (name($arg) eq 'cd:reference') { - my $optional = (attribute($arg,'optional') eq 'yes' ? 
'optional' : ''); - if (has_attr($arg,'list','yes')) { - push @postdisp, view_post('[ref,..]',$optional); - push @predisp, view_pre('[ref,..]'); - } else { - push @postdisp, view_post('[ref]',$optional); - push @predisp, view_pre('[ref]');; - } - } elsif (name($arg) eq 'cd:word') { - if (has_attr($arg,'list','yes')) { - push @predisp, view_pre ('{...,...}'); - push @postdisp,view_post('{...,...}'); - } else { - push @predisp, view_pre('{...}'); - push @postdisp, view_post('{...}'); - } - } elsif (name($arg) eq 'cd:nothing') { - my $sep = attribute($arg,'separator'); - if ($sep) { - if($sep eq 'backslash') { -# push @postdisp,'\\\\','par'; - push @predisp,'\\\\',''; - } else { -# push @postdisp,$sep,'par'; - push @predisp,$sep,''; - } - } - push @predisp,'...',''; - push @postdisp,'text',['variable','par'],"\n",'par'; - } elsif (name($arg) eq 'cd:file') { - push @predisp,'...',['default']; - push @postdisp,'...',['default','par'],"\n",'par'; - } elsif (name($arg) eq 'cd:csname') { - push @predisp,'\command',['command']; - push @postdisp,'\command',['command','par'],"\n",'par'; - } elsif (name($arg) eq 'cd:index') { - if (has_attr($arg,'list','yes')) { - push @predisp,view_pre('{..+...+..}'); - push @postdisp,view_post('{..+...+..}'); - } else { - push @predisp, view_pre('{...}'); - push @postdisp,view_post('{...}'); - } - } elsif (name($arg) eq 'cd:position') { - if (has_attr($arg,'list','yes')) { - push @predisp,view_pre('(...,...)'); - push @postdisp,view_post('(...,...)'); - } else { - push @predisp,view_pre('(...)'); - push @postdisp,view_post('(...)'); - } - } elsif (name($arg) eq 'cd:displaymath') { - push @predisp, ('$$',['argument'],'...','','$$',['argument']); - push @postdisp, ('$$',['argument','par'],'...',['par'],'$$',['argument','par']); - } elsif (name($arg) eq 'cd:tex') { - my $sep = attribute($arg,'separator'); - if ($sep) { - if($sep eq 'backslash') { -# push @postdisp,'\\\\','par'; - push @predisp,'\\\\',''; - } else { -# push @postdisp,$sep,'par'; - push @predisp,$sep,''; - } - } - my $cmd = "\\" . attribute($arg,'command'); - push @predisp,$cmd,''; -# push @postdisp,$cmd,['command','par'],"\n",'par'; - } - } - return (\@predisp,\@postdisp); -} - - -# Hello thereHowdydo -# -# would be: -# -# Tag Content -# ================================================================== -# [foo, [{}, head, [{id => "a"}, 0, "Hello ", em, [{}, 0, "there"]], -# bar, [ {}, 0, "Howdy", ref, [{}]], -# 0, "do" -# ] -# ] - -sub attribute { - my ($elem,$att) = @_; - if (defined $elem->[1] && defined $elem->[1]->[0] && defined $elem->[1]->[0]->{$att}) { - my $ret = $elem->[1]->[0]->{$att}; - show ('debug',"returning attribute $att=$ret"); - return $elem->[1]->[0]->{$att}; - } else { - return ''; - } -} - -sub name { - my ($elem) = @_; - if (defined $elem->[0] ) { - return $elem->[0]; - } else { - return ''; - } -} - -# return all children at a certain level -sub children { - my ($elem) = @_; - if (defined $elem->[1] && defined $elem->[1]->[1]) { - my @items = @{$elem->[1]}; - shift @items ; # deletes the attribute. 
- my @ret = (); - while (@items) { - push @ret, [shift @items, shift @items]; - } - return @ret; - } else { - return (); - } -} - -# return the first child with the right name -sub find { - my ($elem,$name) = @_; - if ($elem->[0] eq $name) { - return $elem; - } - if (ref($elem->[1]) eq 'ARRAY') { - my @contents = @{$elem->[1]}; - shift @contents; - while (my $ename = shift @contents) { - my $con = shift @contents; - if ($ename eq $name) { - return [$ename,$con]; - } - } - } - return []; -} - -sub do_update_command # type: 0=display, 1=compute only - { my ($command, $type) = @_ ; - $type = 0 unless defined $type; - my $setup; - if (!defined $trees{$Interface}{$command}) { - my $parser = XML::Parser->new('Style' => 'Tree'); - $trees{$Interface}{$command} = $parser->parse($setups{$Interface}{$command}); - } - $setup = $trees{$Interface}{$command} ; - my $predisp = undef; - my $postdisp = undef; - my @cmddisp = (); - my @cmddispafter = (); - my $pradisp = undef; - my $altdisp = undef; - if (attribute($setup,'file')) { - my $filename = attribute($setup,'file'); - my $fileline = attribute($setup,'line') || 0; - $textwindow->insert ('end',"$filename:${fileline}::\n\n", '' ); - } - # start with backslash - push @cmddisp, "\\", 'command' ; - my $env = 0; - if (has_attr($setup,'type','environment')) { - $env = 1; - } - if ($env) { push @cmddisp, "start", 'command' ; } - if ($env) { push @cmddispafter, " ... ", '', "\\stop", 'command' ; } - my $seq = find($setup,'cd:sequence'); - # display rest of command name - foreach my $seqpart (children($seq)) { - my $text = attribute($seqpart,'value'); - if (name($seqpart) eq 'cd:variable') { - push @cmddisp, $text, ['command','user']; - if ($env) { push @cmddispafter, $text, ['command','user']; } - } elsif (name($seqpart) eq 'cd:string') { - push @cmddisp, $text, 'command'; - if ($env) { push @cmddispafter, $text, 'command'; } - } - } - # - my $args = find($setup,'cd:arguments'); - # display commands - if ($args) { - my $curarg = 0; - foreach my $arg (children($args)) { - if (name($arg) eq 'cd:choice') { - my ($a,$b) = children($arg); - ($predisp,$postdisp) = create_setup_arguments(['cd:arguments',[{}, @$a]]); - ($pradisp,$altdisp) = create_setup_arguments(['cd:arguments',[{}, @$b]]); - } else { - ($predisp,$postdisp) = create_setup_arguments($args); - } - $curarg++; - } - } - return if $type; - if(defined $postdisp) { - if(defined $altdisp) { - $textwindow->insert('end',@cmddisp,@$predisp,@cmddispafter, "\n",'', - @cmddisp,@$pradisp,@cmddispafter, "\n\n",'', - @cmddisp, "\n",'', - @$postdisp, "\n",'', - @cmddisp, "\n",'', - @$altdisp); - } else { - $textwindow->insert('end',@cmddisp,@$predisp, @cmddispafter ,"\n\n",'', - @cmddisp,"\n",'', - @$postdisp); - } - } else { - $textwindow->insert('end',@cmddisp); - } -} - - -#D The next feature is dedicated to Tobias, who suggested -#D it, and Taco, who saw it as yet another proof of the -#D speed of \PERL. It's also dedicated to Ton, who needs it -#D for translating the big manual. 
- -sub handle_request { - my $index = $listbox -> index('end') ; - return unless $index; - my $req = $request -> get ; - return unless $req; - $req =~ s/\\//o ; - $req =~ s/\s//go ; - $request -> delete('0','end') ; - $request -> insert('0',$req) ; - return unless $req; - my ($l,$c) = split (/\./,$index) ; - for (my $i=0;$i<=$l;$i++) { - $index = "$i.0" ; - my $str = $listbox -> get ($index, $index) ; - if (defined $str && ref($str) eq 'ARRAY') { - $str = "@{$str}"; - } - if (defined $str && $str =~ /^$req/) { - show_command($str) ; - return ; - } - } -} - -sub insert_request { - my ($self, $chr) = @_ ; - # don't echo duplicate if $chr was keyed in in the (focussed) entrybox - $request -> insert ('end', $chr) unless $self eq $request; - handle_request(); -} - -sub delete_request { - my $self = shift ; - # delete last character, carefully - if ($self ne $request) { - my $to = $request -> index ('end') ; - my $from = $to - 1 ; - if ($from<0) { $from = 0 } - $request -> delete ($from,$to); - } - handle_request(); -} - -sub new_request { - $request -> delete (0,'end') ; - handle_request(); -} - diff --git a/Master/texmf-dist/scripts/context/ruby/base/ctx.rb b/Master/texmf-dist/scripts/context/ruby/base/ctx.rb index a077297f243..13b4045afe0 100644 --- a/Master/texmf-dist/scripts/context/ruby/base/ctx.rb +++ b/Master/texmf-dist/scripts/context/ruby/base/ctx.rb @@ -124,6 +124,11 @@ class CtxRunner report("loading ctx file #{@ctxname}") end + if @xmldata then + # out if a sudden rexml started to be picky about namespaces + @xmldata.gsub!(//,"") + end + begin @xmldata = REXML::Document.new(@xmldata) rescue diff --git a/Master/texmf-dist/scripts/context/ruby/base/tex.rb b/Master/texmf-dist/scripts/context/ruby/base/tex.rb index 23db7f1e872..84025693b01 100644 --- a/Master/texmf-dist/scripts/context/ruby/base/tex.rb +++ b/Master/texmf-dist/scripts/context/ruby/base/tex.rb @@ -680,6 +680,11 @@ class TEX texformatpath = '' setvariable('error','no permissions to write') end + if not mpsformats then + # we want metafun to be in sync + setvariable('mpsformats',defaultmpsformats) + mpsformats = validmpsformat(getarrayvariable('mpsformats')) + end else texformatpath = '' end @@ -756,7 +761,8 @@ class TEX # utilities report('start of analysis') results = Array.new - ['texexec','texutil','ctxtools'].each do |program| + # ['texexec','texutil','ctxtools'].each do |program| + ['texexec'].each do |program| result = `texmfstart #{program} --help` result.sub!(/.*?(#{program}[^\n]+)\n.*/mi) do $1 end results.push("#{result}") @@ -1718,10 +1724,10 @@ end def fixbackendvars(backend) if backend then - report("fixing backend map path for #{backend}") if getvariable('verbose') ENV['backend'] = backend ; ENV['progname'] = backend unless validtexengine(backend) - ENV['TEXFONTMAPS'] = ['.',"\$TEXMF/fonts/map/{#{backend},pdftex,dvips,}//",'./fonts//'].join_path + ENV['TEXFONTMAPS'] = ['.',"\$TEXMF/fonts/{data,map}/{#{backend},pdftex,dvips,}//",'./fonts//'].join_path + report("fixing backend map path for #{backend}: #{ENV['TEXFONTMAPS']}") if getvariable('verbose') else report("unable to fix backend map path") if getvariable('verbose') end @@ -2047,7 +2053,10 @@ end Kpse.runscript('ctxtools',rawbase,'--purge') if getvariable('purge') Kpse.runscript('ctxtools',rawbase,'--purge --all') if getvariable('purgeall') -# till here + + # runcommand('mtxrun','--script','ctxtools',rawbase,'--purge') if getvariable('purge') + # runcommand('mtxrun','--script','ctxtools',rawbase,'--purge --all') if getvariable('purgeall') + when 'latex' 
then ok = runtex(rawname) diff --git a/Master/texmf-dist/scripts/context/ruby/newimgtopdf.rb b/Master/texmf-dist/scripts/context/ruby/newimgtopdf.rb deleted file mode 100644 index 563ae5b80a0..00000000000 --- a/Master/texmf-dist/scripts/context/ruby/newimgtopdf.rb +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env ruby - -# program : imgtopdf -# copyright : PRAGMA Advanced Document Engineering -# version : 2002-2005 -# author : Hans Hagen - -load(File.join(File.expand_path(File.dirname($0)),'imgtopdf.rb')) diff --git a/Master/texmf-dist/scripts/context/ruby/newpstopdf.rb b/Master/texmf-dist/scripts/context/ruby/newpstopdf.rb deleted file mode 100644 index a45b4cab896..00000000000 --- a/Master/texmf-dist/scripts/context/ruby/newpstopdf.rb +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env ruby - -# program : pstopdf -# copyright : PRAGMA Advanced Document Engineering -# version : 2002-2005 -# author : Hans Hagen - -load(File.join(File.expand_path(File.dirname($0)),'pstopdf.rb')) diff --git a/Master/texmf-dist/scripts/context/ruby/newtexexec.rb b/Master/texmf-dist/scripts/context/ruby/newtexexec.rb deleted file mode 100644 index 6b4db52dc51..00000000000 --- a/Master/texmf-dist/scripts/context/ruby/newtexexec.rb +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env ruby - -# program : texexec -# copyright : PRAGMA Advanced Document Engineering -# version : 1997-2006 -# author : Hans Hagen - -load(File.join(File.expand_path(File.dirname($0)),'texexec.rb')) diff --git a/Master/texmf-dist/scripts/context/ruby/newtexutil.rb b/Master/texmf-dist/scripts/context/ruby/newtexutil.rb deleted file mode 100644 index d6dd06a71ee..00000000000 --- a/Master/texmf-dist/scripts/context/ruby/newtexutil.rb +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env ruby - -# program : texexec -# copyright : PRAGMA Advanced Document Engineering -# version : 1997-2006 -# author : Hans Hagen - -load(File.join(File.expand_path(File.dirname($0)),'texutil.rb')) diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/context.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/context.cmd deleted file mode 100755 index 11303c2714e..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/context.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --script context %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/context.exe b/Master/texmf-dist/scripts/context/stubs/mswin/context.exe new file mode 100644 index 00000000000..2d45f27494d Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/context.exe differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/ctxtools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/ctxtools.bat deleted file mode 100755 index f502b6750bd..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/ctxtools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute ctxtools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/luatools.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/luatools.cmd deleted file mode 100755 index 635ee0db3d5..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/luatools.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%luatools.lua" %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/luatools.exe b/Master/texmf-dist/scripts/context/stubs/mswin/luatools.exe new file mode 100644 index 00000000000..2d45f27494d 
Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/luatools.exe differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/luatools.lua b/Master/texmf-dist/scripts/context/stubs/mswin/luatools.lua index 433d1b8dc0a..1d87322c108 100644 --- a/Master/texmf-dist/scripts/context/stubs/mswin/luatools.lua +++ b/Master/texmf-dist/scripts/context/stubs/mswin/luatools.lua @@ -39,13 +39,16 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-string'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep +local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch = lpeg.match + +-- some functions may disappear as they are not used anywhere if not string.split then @@ -85,8 +88,16 @@ function string:unquote() return (gsub(self,"^([\"\'])(.*)%1$","%2")) end +--~ function string:unquote() +--~ if find(self,"^[\'\"]") then +--~ return sub(self,2,-2) +--~ else +--~ return self +--~ end +--~ end + function string:quote() -- we could use format("%q") - return '"' .. self:unquote() .. '"' + return format("%q",self) end function string:count(pattern) -- variant 3 @@ -106,12 +117,23 @@ function string:limit(n,sentinel) end end -function string:strip() - return (gsub(self,"^%s*(.-)%s*$", "%1")) +--~ function string:strip() -- the .- is quite efficient +--~ -- return match(self,"^%s*(.-)%s*$") or "" +--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list +--~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)') +--~ end + +do -- roberto's variant: + local space = lpeg.S(" \t\v\n") + local nospace = 1 - space + local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0) + function string.strip(str) + return lpegmatch(stripper,str) or "" + end end function string:is_empty() - return not find(find,"%S") + return not find(self,"%S") end function string:enhance(pattern,action) @@ -145,14 +167,14 @@ if not string.characters then local function nextchar(str, index) index = index + 1 - return (index <= #str) and index or nil, str:sub(index,index) + return (index <= #str) and index or nil, sub(str,index,index) end function string:characters() return nextchar, self, 0 end local function nextbyte(str, index) index = index + 1 - return (index <= #str) and index or nil, byte(str:sub(index,index)) + return (index <= #str) and index or nil, byte(sub(str,index,index)) end function string:bytes() return nextbyte, self, 0 @@ -165,7 +187,7 @@ end function string:rpadd(n,chr) local m = n-#self if m > 0 then - return self .. self.rep(chr or " ",m) + return self .. rep(chr or " ",m) else return self end @@ -174,7 +196,7 @@ end function string:lpadd(n,chr) local m = n-#self if m > 0 then - return self.rep(chr or " ",m) .. self + return rep(chr or " ",m) .. 
self else return self end @@ -222,6 +244,17 @@ function string:pattesc() return (gsub(self,".",patterns_escapes)) end +local simple_escapes = { + ["-"] = "%-", + ["."] = "%.", + ["?"] = ".", + ["*"] = ".*", +} + +function string:simpleesc() + return (gsub(self,".",simple_escapes)) +end + function string:tohash() local t = { } for s in gmatch(self,"([^, ]+)") do -- lpeg @@ -233,10 +266,10 @@ end local pattern = lpeg.Ct(lpeg.C(1)^0) function string:totable() - return pattern:match(self) + return lpegmatch(pattern,self) end ---~ for _, str in ipairs { +--~ local t = { --~ "1234567123456712345671234567", --~ "a\tb\tc", --~ "aa\tbb\tcc", @@ -244,7 +277,10 @@ end --~ "aaaa\tbbbb\tcccc", --~ "aaaaa\tbbbbb\tccccc", --~ "aaaaaa\tbbbbbb\tcccccc", ---~ } do print(string.tabtospace(str)) end +--~ } +--~ for k,v do +--~ print(string.tabtospace(t[k])) +--~ end function string.tabtospace(str,tab) -- we don't handle embedded newlines @@ -252,7 +288,7 @@ function string.tabtospace(str,tab) local s = find(str,"\t") if s then if not tab then tab = 7 end -- only when found - local d = tab-(s-1)%tab + local d = tab-(s-1) % tab if d > 0 then str = gsub(str,"\t",rep(" ",d),1) else @@ -271,6 +307,25 @@ function string:compactlong() -- strips newlines and leading spaces return self end +function string:striplong() -- strips newlines and leading spaces + self = gsub(self,"^%s*","") + self = gsub(self,"[\n\r]+ *","\n") + return self +end + +function string:topattern(lowercase,strict) + if lowercase then + self = lower(self) + end + self = gsub(self,".",simple_escapes) + if self == "" then + self = ".*" + elseif strict then + self = "^" .. self .. "$" + end + return self +end + end -- of closure @@ -278,58 +333,64 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-lpeg'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc - ---~ l-lpeg.lua : - ---~ lpeg.digit = lpeg.R('09')^1 ---~ lpeg.sign = lpeg.S('+-')^1 ---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1) ---~ lpeg.number = lpeg.float + lpeg.integer ---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1 ---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1 ---~ lpeg.uppercase = lpeg.P("AZ") ---~ lpeg.lowercase = lpeg.P("az") - ---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed ---~ lpeg.space = lpeg.S(' ')^1 ---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1 ---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1 ---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1 - -local hash = { } +local lpeg = require("lpeg") + +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V +local match = lpeg.match + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local utf8byte = R("\128\191") + +patterns.utf8byte = utf8byte +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8byte +patterns.utf8three = R("\224\239") * utf8byte * utf8byte +patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte + +patterns.digit = digit 
+patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.float = sign^0 * digit^0 * P('.') * digit^1 +patterns.number = patterns.float + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = S(" ") +patterns.eol = S("\n\r") +patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) +patterns.newline = crlf + cr + lf +patterns.nonspace = 1 - patterns.space +patterns.nonspacer = 1 - patterns.spacer +patterns.whitespace = patterns.eol + patterns.spacer +patterns.nonwhitespace = 1 - patterns.whitespace +patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * lpeg.V(1) } -end - -function lpeg.startswith(pattern) --slightly adapted - return P(pattern) + return P { P(pattern) + 1 * V(1) } -- why so complex? end function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end --- variant: - ---~ local parser = lpeg.Ct(lpeg.splitat(newline)) - -local crlf = P("\r\n") -local cr = P("\r") -local lf = P("\n") -local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -local newline = crlf + cr + lf -local spacing = space^0 * newline - +local spacing = patterns.spacer^0 * patterns.newline -- sort of strip local empty = spacing * Cc("") local nonempty = Cs((1-spacing)^1) * spacing^-1 local content = (empty + nonempty)^1 @@ -337,15 +398,15 @@ local content = (empty + nonempty)^1 local capture = Ct(content^0) function string:splitlines() - return capture:match(self) + return match(capture,self) end -lpeg.linebyline = content -- better make a sublibrary +patterns.textline = content ---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more ---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more ---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps ---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps local splitters_s, splitters_m = { }, { } @@ -355,7 +416,7 @@ local function splitat(separator,single) separator = P(separator) if single then local other, any = C((1 - separator)^0), P(1) - splitter = other * (separator * C(any^0) + "") + splitter = other * (separator * C(any^0) + "") -- ? 
splitters_s[separator] = splitter else local other = C((1 - separator)^0) @@ -370,15 +431,72 @@ lpeg.splitat = splitat local cache = { } +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = Ct(splitat(separator)) + cache[separator] = c + end + return match(c,str) +end + function string:split(separator) local c = cache[separator] if not c then c = Ct(splitat(separator)) cache[separator] = c end - return c:match(self) + return match(c,self) +end + +lpeg.splitters = cache + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,str) +end + +function string:checkedsplit(separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,self) end +--~ function lpeg.append(list,pp) +--~ local p = pp +--~ for l=1,#list do +--~ if p then +--~ p = p + P(list[l]) +--~ else +--~ p = P(list[l]) +--~ end +--~ end +--~ return p +--~ end + +--~ from roberto's site: + +local f1 = string.byte + +local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + end -- of closure @@ -386,7 +504,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-table'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -395,9 +513,10 @@ if not modules then modules = { } end modules ['l-table'] = { table.join = table.concat local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump +local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match local getmetatable, setmetatable = getmetatable, setmetatable -local type, next, tostring, ipairs = type, next, tostring, ipairs +local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs +local unpack = unpack or table.unpack function table.strip(tab) local lst = { } @@ -412,6 +531,14 @@ function table.strip(tab) return lst end +function table.keys(t) + local k = { } + for key, _ in next, t do + k[#k+1] = key + end + return k +end + local function compare(a,b) return (tostring(a) < tostring(b)) end @@ -455,7 +582,7 @@ end table.sortedkeys = sortedkeys table.sortedhashkeys = sortedhashkeys -function table.sortedpairs(t) +function table.sortedhash(t) local s = sortedhashkeys(t) -- maybe just sortedkeys local n = 0 local function kv(s) @@ -466,6 +593,8 @@ function table.sortedpairs(t) return kv, s end +table.sortedpairs = table.sortedhash + function table.append(t, list) for _,v in next, list do insert(t,v) @@ -588,18 +717,18 @@ end -- slower than #t on indexed tables (#t only 
returns the size of the numerically indexed slice) -function table.is_empty(t) +function table.is_empty(t) -- obolete, use inline code instead return not t or not next(t) end -function table.one_entry(t) +function table.one_entry(t) -- obolete, use inline code instead local n = next(t) return n and not next(t,n) end -function table.starts_at(t) - return ipairs(t,1)(t,0) -end +--~ function table.starts_at(t) -- obsolete, not nice +--~ return ipairs(t,1)(t,0) +--~ end function table.tohash(t,value) local h = { } @@ -677,6 +806,8 @@ end -- -- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) +-- problem: there no good number_to_string converter with the best resolution + local function do_serialize(root,name,depth,level,indexed) if level > 0 then depth = depth .. " " @@ -699,8 +830,9 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s{",depth)) end end + -- we could check for k (index) being number (cardinal) if root and next(root) then - local first, last = nil, 0 -- #root cannot be trusted here + local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) if compact then -- NOT: for k=1,#root do (we need to quit at nil) for k,v in ipairs(root) do -- can we use next? @@ -721,10 +853,10 @@ local function do_serialize(root,name,depth,level,indexed) if hexify then handle(format("%s 0x%04X,",depth,v)) else - handle(format("%s %s,",depth,v)) + handle(format("%s %s,",depth,v)) -- %.99g end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) @@ -761,29 +893,29 @@ local function do_serialize(root,name,depth,level,indexed) --~ if hexify then --~ handle(format("%s %s=0x%04X,",depth,key(k),v)) --~ else - --~ handle(format("%s %s=%s,",depth,key(k),v)) + --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g --~ end if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) else - handle(format("%s [%s]=%s,",depth,k,v)) + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g end elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then if hexify then handle(format("%s %s=0x%04X,",depth,k,v)) else - handle(format("%s %s=%s,",depth,k,v)) + handle(format("%s %s=%s,",depth,k,v)) -- %.99g end else if hexify then handle(format("%s [%q]=0x%04X,",depth,k,v)) else - handle(format("%s [%q]=%s,",depth,k,v)) + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then --~ handle(format("%s %s=%s,",depth,key(k),v)) if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then @@ -992,7 +1124,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify) end end -local function flatten(t,f,complete) +local function flatten(t,f,complete) -- is this used? meybe a variant with next, ... 
for i=1,#t do local v = t[i] if type(v) == "table" then @@ -1021,6 +1153,24 @@ end table.flatten_one_level = table.unnest +-- a better one: + +local function flattened(t,f) + if not f then + f = { } + end + for k, v in next, t do + if type(v) == "table" then + flattened(v,f) + else + f[k] = v + end + end + return f +end + +table.flattened = flattened + -- the next three may disappear function table.remove_value(t,value) -- todo: n @@ -1156,7 +1306,7 @@ function table.clone(t,p) -- t is optional or nil or table elseif not t then t = { } end - setmetatable(t, { __index = function(_,key) return p[key] end }) + setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ? return t end @@ -1184,21 +1334,35 @@ function table.reverse(t) return tt end ---~ function table.keys(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return k ---~ end +function table.insert_before_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end ---~ function table.keys_as_string(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return concat(k,"") ---~ end +function table.insert_after_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end end -- of closure @@ -1207,13 +1371,13 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-io'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local byte = string.byte +local byte, find, gsub = string.byte, string.find, string.gsub if string.find(os.getenv("PATH"),";") then io.fileseparator, io.pathseparator = "\\", ";" @@ -1242,7 +1406,7 @@ function io.savedata(filename,data,joiner) elseif type(data) == "function" then data(f) else - f:write(data) + f:write(data or "") end f:close() return true @@ -1371,20 +1535,21 @@ function io.ask(question,default,options) end io.write(string.format(" ")) local answer = io.read() - answer = answer:gsub("^%s*(.*)%s*$","%1") + answer = gsub(answer,"^%s*(.*)%s*$","%1") if answer == "" and default then return default elseif not options then return answer else - for _,v in pairs(options) do - if v == answer then + for k=1,#options do + if options[k] == answer then return answer end end local pattern = "^" .. 
answer - for _,v in pairs(options) do - if v:find(pattern) then + for k=1,#options do + local v = options[k] + if find(v,pattern) then return v end end @@ -1399,20 +1564,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-number'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format = string.format +local tostring = tostring +local format, floor, insert, match = string.format, math.floor, table.insert, string.match +local lpegmatch = lpeg.match number = number or { } -- a,b,c,d,e,f = number.toset(100101) function number.toset(n) - return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") + return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") end function number.toevenhex(n) @@ -1438,10 +1605,21 @@ end local one = lpeg.C(1-lpeg.S(''))^1 function number.toset(n) - return one:match(tostring(n)) + return lpegmatch(one,tostring(n)) end - +function number.bits(n,zero) + local t, i = { }, (zero and 0) or 1 + while n > 0 do + local m = n % 2 + if m > 0 then + insert(t,1,i) + end + n = floor(n/2) + i = i + 1 + end + return t +end end -- of closure @@ -1450,7 +1628,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-set'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1540,46 +1718,63 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-os'] = { version = 1.001, - comment = "companion to luat-lub.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local find = string.find +-- maybe build io.flush in os.execute + +local find, format, gsub = string.find, string.format, string.gsub +local random, ceil = math.random, math.ceil + +local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end function os.resultof(command) - return io.popen(command,"r"):read("*all") + ioflush() -- else messed up logging + local handle = io.popen(command,"r") + if not handle then + -- print("unknown command '".. command .. "' in os.resultof") + return "" + else + return handle:read("*all") or "" + end end -if not os.exec then os.exec = os.execute end -if not os.spawn then os.spawn = os.execute end - ---~ os.type : windows | unix (new, we already guessed os.platform) ---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +--~ os.type : windows | unix (new, we already guessed os.platform) +--~ os.name : windows | msdos | linux | macosx | solaris | .. 
| generic (new) +--~ os.platform : extended os.name with architecture if not io.fileseparator then if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows" + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" else - io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix" + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" end end -os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix" +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix = 'dll', 'exe' +else + os.libsuffix, os.binsuffix = 'so', '' +end function os.launch(str) - if os.platform == "windows" then + if os.type == "windows" then os.execute("start " .. str) -- os.spawn ? else os.execute(str .. " &") -- os.spawn ? end end -if not os.setenv then - function os.setenv() return false end -end - if not os.times then -- utime = user time -- stime = system time @@ -1609,64 +1804,218 @@ end --~ print(os.date("%H:%M:%S",os.gettimeofday())) --~ print(os.date("%H:%M:%S",os.time())) -os.arch = os.arch or function() - local a = os.resultof("uname -m") or "linux" - os.arch = function() - return a +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } + +local resolvers = os.resolvers + +local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil +local osix = osmt.__index + +osmt.__index = function(t,k) + return (resolvers[k] or osix)(t,k) +end + +setmetatable(os,osmt) + +if not os.setenv then + + -- we still store them but they won't be seen in + -- child processes although we might pass them some day + -- using command concatination + + local env, getenv = { }, os.getenv + + function os.setenv(k,v) + env[k] = v + end + + function os.getenv(k) + return env[k] or getenv(k) end - return a + end -local platform +-- we can use HOSTTYPE on some platforms -function os.currentplatform(name,default) - if not platform then - local name = os.name or os.platform or name -- os.name is built in, os.platform is mine - if not name then - platform = default or "linux" - elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then - if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then - platform = "mswin-64" - else - platform = "mswin" - end +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" else - local architecture = os.arch() - if name == "linux" then - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - elseif name 
== "macosx" then - if find(architecture,"i386") then - platform = "osx-intel" - else - platform = "osx-ppc" - end - elseif name == "sunos" then - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - elseif name == "freebsd" then - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - else - platform = default or name - end + platform = "mswin" end - function os.currentplatform() - return platform + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-64" + else + platform = "kfreebsd-i386" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + 
os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. +-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(os.date("%H")-os.date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) + end + else + return 1 end - return platform end @@ -1676,7 +2025,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-file'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1687,14 +2036,17 @@ if not modules then modules = { } end modules ['l-file'] = { file = file or { } local concat = table.concat -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char +local lpegmatch = lpeg.match function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) end function file.addsuffix(filename, suffix) - if not find(filename,"%.[%a%d]+$") then + if not suffix or suffix == "" then + return filename + elseif not find(filename,"%.[%a%d]+$") then return filename .. "." .. suffix else return filename @@ -1717,20 +2069,39 @@ function file.nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$","")) end -function file.extname(name) - return match(name,"^.+%.([^/\\]-)$") or "" +function file.extname(name,default) + return match(name,"^.+%.([^/\\]-)$") or default or "" end file.suffix = file.extname ---~ print(file.join("x/","/y")) ---~ print(file.join("http://","/y")) ---~ print(file.join("http://a","/y")) ---~ print(file.join("http:///a","/y")) ---~ print(file.join("//nas-1","/y")) +--~ function file.join(...) +--~ local pth = concat({...},"/") +--~ pth = gsub(pth,"\\","/") +--~ local a, b = match(pth,"^(.*://)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ a, b = match(pth,"^(//)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ return (gsub(pth,"//+","/")) +--~ end + +local trick_1 = char(1) +local trick_2 = "^" .. trick_1 .. "/+" function file.join(...) - local pth = concat({...},"/") + local lst = { ... 
} + local a, b = lst[1], lst[2] + if a == "" then + lst[1] = trick_1 + elseif b and find(a,"^/+$") and find(b,"^/") then + lst[1] = "" + lst[2] = gsub(b,"^/+","") + end + local pth = concat(lst,"/") pth = gsub(pth,"\\","/") local a, b = match(pth,"^(.*://)(.*)$") if a and b then @@ -1740,17 +2111,28 @@ function file.join(...) if a and b then return a .. gsub(b,"//+","/") end + pth = gsub(pth,trick_2,"") return (gsub(pth,"//+","/")) end +--~ print(file.join("//","/y")) +--~ print(file.join("/","/y")) +--~ print(file.join("","/y")) +--~ print(file.join("/x/","/y")) +--~ print(file.join("x/","/y")) +--~ print(file.join("http://","/y")) +--~ print(file.join("http://a","/y")) +--~ print(file.join("http:///a","/y")) +--~ print(file.join("//nas-1","/y")) + function file.iswritable(name) local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,".")) - return a and a.permissions:sub(2,2) == "w" + return a and sub(a.permissions,2,2) == "w" end function file.isreadable(name) local a = lfs.attributes(name) - return a and a.permissions:sub(1,1) == "r" + return a and sub(a.permissions,1,1) == "r" end file.is_readable = file.isreadable @@ -1758,36 +2140,50 @@ file.is_writable = file.iswritable -- todo: lpeg -function file.split_path(str) - local t = { } - str = gsub(str,"\\", "/") - str = gsub(str,"(%a):([;/])", "%1\001%2") - for name in gmatch(str,"([^;:]+)") do - if name ~= "" then - t[#t+1] = gsub(name,"\001",":") - end - end - return t +--~ function file.split_path(str) +--~ local t = { } +--~ str = gsub(str,"\\", "/") +--~ str = gsub(str,"(%a):([;/])", "%1\001%2") +--~ for name in gmatch(str,"([^;:]+)") do +--~ if name ~= "" then +--~ t[#t+1] = gsub(name,"\001",":") +--~ end +--~ end +--~ return t +--~ end + +local checkedsplit = string.checkedsplit + +function file.split_path(str,separator) + str = gsub(str,"\\","/") + return checkedsplit(str,separator or io.pathseparator) end function file.join_path(tab) return concat(tab,io.pathseparator) -- can have trailing // end +-- we can hash them weakly + function file.collapse_path(str) - str = gsub(str,"/%./","/") - local n, m = 1, 1 - while n > 0 or m > 0 do - str, n = gsub(str,"[^/%.]+/%.%.$","") - str, m = gsub(str,"[^/%.]+/%.%./","") - end - str = gsub(str,"([^/])/$","%1") - str = gsub(str,"^%./","") - str = gsub(str,"/%.$","") + str = gsub(str,"\\","/") + if find(str,"/") then + str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified + str = gsub(str,"/%./","/") + local n, m = 1, 1 + while n > 0 or m > 0 do + str, n = gsub(str,"[^/%.]+/%.%.$","") + str, m = gsub(str,"[^/%.]+/%.%./","") + end + str = gsub(str,"([^/])/$","%1") + -- str = gsub(str,"^%./","") -- ./xx in qualified + str = gsub(str,"/%.$","") + end if str == "" then str = "." 
end return str end +--~ print(file.collapse_path("/a")) --~ print(file.collapse_path("a/./b/..")) --~ print(file.collapse_path("a/aa/../b/bb")) --~ print(file.collapse_path("a/../..")) @@ -1817,27 +2213,27 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1 --~ function file.extname(name) ---~ return pattern:match(name) or "" +--~ return lpegmatch(pattern,name) or "" --~ end --~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1) --~ function file.removesuffix(name) ---~ return pattern:match(name) +--~ return lpegmatch(pattern,name) --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1 --~ function file.basename(name) ---~ return pattern:match(name) or name +--~ return lpegmatch(pattern,name) or name --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1 --~ function file.dirname(name) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) +--~ return sub(name,1,p-2) --~ else --~ return "" --~ end @@ -1846,7 +2242,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.addsuffix(name, suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then --~ return name --~ else @@ -1857,9 +2253,9 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.replacesuffix(name,suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) .. "." .. suffix +--~ return sub(name,1,p-2) .. "." .. suffix --~ else --~ return name .. "." .. suffix --~ end @@ -1868,11 +2264,11 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1 --~ function file.nameonly(name) ---~ local a, b = pattern:match(name) +--~ local a, b = lpegmatch(pattern,name) --~ if b then ---~ return name:sub(a,b-2) +--~ return sub(name,a,b-2) --~ elseif a then ---~ return name:sub(a) +--~ return sub(name,a) --~ else --~ return name --~ end @@ -1906,11 +2302,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":") -- ./name ../name /name c: :// name/name function file.is_qualified_path(filename) - return qualified:match(filename) + return lpegmatch(qualified,filename) ~= nil end function file.is_rootbased_path(filename) - return rootbased:match(filename) + return lpegmatch(rootbased,filename) ~= nil end local slash = lpeg.S("\\/") @@ -1923,16 +2319,25 @@ local base = lpeg.C((1-suffix)^0) local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc("")) function file.splitname(str) -- returns drive, path, base, suffix - return pattern:match(str) + return lpegmatch(pattern,str) end --- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end -- -- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } -- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } -- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } -- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } +--~ -- todo: +--~ +--~ if os.type == "windows" then +--~ local currentdir = lfs.currentdir +--~ function lfs.currentdir() +--~ return 
(gsub(currentdir(),"\\","/")) +--~ end +--~ end + end -- of closure @@ -1997,7 +2402,7 @@ end function file.loadchecksum(name) if md5 then local data = io.loaddata(name .. ".md5") - return data and data:gsub("%s","") + return data and (gsub(data,"%s","")) end return nil end @@ -2018,14 +2423,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-url'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local char, gmatch = string.char, string.gmatch +local char, gmatch, gsub = string.char, string.gmatch, string.gsub local tonumber, type = tonumber, type +local lpegmatch = lpeg.match -- from the spec (on the web): -- @@ -2049,7 +2455,9 @@ local hexdigit = lpeg.R("09","AF","af") local plus = lpeg.P("+") local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar) -local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^0) * colon + lpeg.Cc("") +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) + +local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("") local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("") local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("") local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("") @@ -2057,25 +2465,51 @@ local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0 local parser = lpeg.Ct(scheme * authority * path * query * fragment) +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + function url.split(str) - return (type(str) == "string" and parser:match(str)) or str + return (type(str) == "string" and lpegmatch(parser,str)) or str end +-- todo: cache them + function url.hashed(str) local s = url.split(str) + local somescheme = s[1] ~= "" return { - scheme = (s[1] ~= "" and s[1]) or "file", + scheme = (somescheme and s[1]) or "file", authority = s[2], - path = s[3], - query = s[4], - fragment = s[5], - original = str + path = s[3], + query = s[4], + fragment = s[5], + original = str, + noscheme = not somescheme, } end +function url.hasscheme(str) + return url.split(str)[1] ~= "" +end + +function url.addscheme(str,scheme) + return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str) +end + +function url.construct(hash) + local fullurl = hash.scheme .. "://".. hash.authority .. hash.path + if hash.query then + fullurl = fullurl .. "?".. hash.query + end + if hash.fragment then + fullurl = fullurl .. "#"..
hash.fragment + end + return fullurl +end + function url.filename(filename) local t = url.hashed(filename) - return (t.scheme == "file" and t.path:gsub("^/([a-zA-Z])([:|])/)","%1:")) or filename + return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end function url.query(str) @@ -2129,24 +2563,59 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-dir'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- dir.expand_name will be merged with cleanpath and collapsepath + local type = type -local find, gmatch = string.find, string.gmatch +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local lpegmatch = lpeg.match dir = dir or { } +-- handy + +function dir.current() + return (gsub(lfs.currentdir(),"\\","/")) +end + -- optimizing for no string.find (*) does not save time local attributes = lfs.attributes local walkdir = lfs.dir -local function glob_pattern(path,patt,recurse,action) +local function glob_pattern(path,patt,recurse,action) + local ok, scanner + if path == "/" then + ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner do + local full = path .. name + local mode = attributes(full,'mode') + if mode == 'file' then + if find(full,patt) then + action(full) + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + glob_pattern(full,patt,recurse,action) + end + end + end +end + +dir.glob_pattern = glob_pattern + +local function collect_pattern(path,patt,recurse,result) local ok, scanner + result = result or { } if path == "/" then ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe else @@ -2156,19 +2625,22 @@ local function glob_pattern(path,patt,recurse,action) if not find(path,"/$") then path = path .. '/' end for name in scanner do local full = path .. name - local mode = attributes(full,'mode') + local attr = attributes(full) + local mode = attr.mode if mode == 'file' then if find(full,patt) then - action(full) + result[name] = attr end elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - glob_pattern(full,patt,recurse,action) + attr.list = collect_pattern(full,patt,recurse) + result[name] = attr end end end + return result end -dir.glob_pattern = glob_pattern +dir.collect_pattern = collect_pattern local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V @@ -2189,29 +2661,48 @@ local filter = Cs ( ( )^0 ) local function glob(str,t) - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif lfs.isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. 
base) + glob_pattern(start,result,recurse,t) + end end - return t - elseif lfs.isfile(str) then - local t = t or { } - t[#t+1] = str - return t else - local split = pattern:match(str) - if split then + if type(str) == "table" then local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = filter:match(start .. base) - glob_pattern(start,result,recurse,action) + for s=1,#str do + glob(str[s],t) + end + return t + elseif lfs.isfile(str) then + local t = t or { } + t[#t+1] = str return t else - return { } + local split = lpegmatch(pattern,str) + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,action) + return t + else + return { } + end end end end @@ -2273,11 +2764,12 @@ end local make_indeed = true -- false -if string.find(os.getenv("PATH"),";") then +if string.find(os.getenv("PATH"),";") then -- os.type == "windows" function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2288,13 +2780,13 @@ if string.find(os.getenv("PATH"),";") then end local first, middle, last local drive = false - first, middle, last = str:match("^(//)(//*)(.*)$") + first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else - first, last = str:match("^(//)/*(.-)$") + first, last = match(str,"^(//)/*(.-)$") if first then - middle, last = str:match("([^/]+)/+(.-)$") + middle, last = match(str,"([^/]+)/+(.-)$") if middle then pth = "//" .. middle else @@ -2302,11 +2794,11 @@ if string.find(os.getenv("PATH"),";") then last = "" end else - first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$") + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") if first then pth, drive = first .. middle, true else - middle, last = str:match("^(/*)(.-)$") + middle, last = match(str,"^(/*)(.-)$") if not middle then last = str end @@ -2340,34 +2832,31 @@ if string.find(os.getenv("PATH"),";") then --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) - local first, nothing, last = str:match("^(//)(//*)(.*)$") + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") if first then - first = lfs.currentdir() .. "/" - first = first:gsub("\\","/") + first = dir.current() .. 
"/" end if not first then - first, last = str:match("^(//)/*(.*)$") + first, last = match(str,"^(//)/*(.*)$") end if not first then - first, last = str:match("^([a-zA-Z]:)(.*)$") + first, last = match(str,"^([a-zA-Z]:)(.*)$") if first and not find(last,"^/") then local d = lfs.currentdir() if lfs.chdir(first) then - first = lfs.currentdir() - first = first:gsub("\\","/") + first = dir.current() end lfs.chdir(d) end end if not first then - first, last = lfs.currentdir(), str - first = first:gsub("\\","/") + first, last = dir.current(), str end - last = last:gsub("//","/") - last = last:gsub("/%./","/") - last = last:gsub("^/*","") - first = first:gsub("/*$","") + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") if last == "" then return first else @@ -2378,8 +2867,9 @@ if string.find(os.getenv("PATH"),";") then else function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2388,7 +2878,7 @@ else end end end - str = str:gsub("/+","/") + str = gsub(str,"/+","/") if find(str,"^/") then pth = "/" for s in gmatch(str,"[^/]+") do @@ -2422,12 +2912,12 @@ else --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath if not find(str,"^/") then str = lfs.currentdir() .. "/" .. str end - str = str:gsub("//","/") - str = str:gsub("/%./","/") + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") return str end @@ -2442,7 +2932,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-boolean'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2503,19 +2993,40 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-unicode'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +if not unicode then + + unicode = { utf8 = { } } + + local floor, char = math.floor, string.char + + function unicode.utf8.utfchar(n) + if n < 0x80 then + return char(n) + elseif n < 0x800 then + return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40)) + elseif n < 0x10000 then + return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + elseif n < 0x40000 then + return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + else -- wrong: + -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + return "?" 
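-- a small worked sketch of the fallback encoder above, only relevant when no unicode
-- library is present so this lua variant is the one in use (byte values in decimal):
--~ unicode.utf8.utfchar(0x41)   -- "A"
--~ unicode.utf8.utfchar(0xE9)   -- "\195\169"     (two bytes,   U+00E9)
--~ unicode.utf8.utfchar(0x20AC) -- "\226\130\172" (three bytes, U+20AC)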
+ end + end + +end + utf = utf or unicode.utf8 local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs -unicode = unicode or { } - -- 0 EF BB BF UTF-8 -- 1 FF FE UTF-16-little-endian -- 2 FE FF UTF-16-big-endian @@ -2530,14 +3041,20 @@ unicode.utfname = { [4] = 'utf-32-be' } -function unicode.utftype(f) -- \000 fails ! +-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated + +function unicode.utftype(f) local str = f:read(4) if not str then f:seek('set') return 0 - elseif find(str,"^%z%z\254\255") then + -- elseif find(str,"^%z%z\254\255") then -- deprecated + -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged + elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH) return 4 - elseif find(str,"^\255\254%z%z") then + -- elseif find(str,"^\255\254%z%z") then -- deprecated + -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged + elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH) return 3 elseif find(str,"^\254\255") then f:seek('set',2) @@ -2681,7 +3198,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-math'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2728,7 +3245,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-utils'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2736,6 +3253,10 @@ if not modules then modules = { } end modules ['l-utils'] = { -- hm, quite unreadable +local gsub = string.gsub +local concat = table.concat +local type, next = type, next + if not utils then utils = { } end if not utils.merger then utils.merger = { } end if not utils.lua then utils.lua = { } end @@ -2773,7 +3294,7 @@ function utils.merger._self_load_(name) end if data and utils.merger.strip_comment then -- saves some 20K - data = data:gsub("%-%-~[^\n\r]*[\r\n]", "") + data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "") end return data or "" end @@ -2791,7 +3312,7 @@ end function utils.merger._self_swap_(data,code) if data ~= "" then - return (data:gsub(utils.merger.pattern, function(s) + return (gsub(data,utils.merger.pattern, function(s) return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end ..
"\n\n" end, 1)) else @@ -2801,8 +3322,8 @@ end --~ stripper: --~ ---~ data = string.gsub(data,"%-%-~[^\n]*\n","") ---~ data = string.gsub(data,"\n\n+","\n") +--~ data = gsub(data,"%-%-~[^\n]*\n","") +--~ data = gsub(data,"\n\n+","\n") function utils.merger._self_libs_(libs,list) local result, f, frozen = { }, nil, false @@ -2810,9 +3331,10 @@ function utils.merger._self_libs_(libs,list) if type(libs) == 'string' then libs = { libs } end if type(list) == 'string' then list = { list } end local foundpath = nil - for _, lib in ipairs(libs) do - for _, pth in ipairs(list) do - pth = string.gsub(pth,"\\","/") -- file.clean_path + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path utils.report("checking library path %s",pth) local name = pth .. "/" .. lib if lfs.isfile(name) then @@ -2824,7 +3346,8 @@ function utils.merger._self_libs_(libs,list) if foundpath then utils.report("using library path %s",foundpath) local right, wrong = { }, { } - for _, lib in ipairs(libs) do + for i=1,#libs do + local lib = libs[i] local fullname = foundpath .. "/" .. lib if lfs.isfile(fullname) then -- right[#right+1] = lib @@ -2838,15 +3361,15 @@ function utils.merger._self_libs_(libs,list) end end if #right > 0 then - utils.report("merged libraries: %s",table.concat(right," ")) + utils.report("merged libraries: %s",concat(right," ")) end if #wrong > 0 then - utils.report("skipped libraries: %s",table.concat(wrong," ")) + utils.report("skipped libraries: %s",concat(wrong," ")) end else utils.report("no valid library path found") end - return table.concat(result, "\n\n") + return concat(result, "\n\n") end function utils.merger.selfcreate(libs,list,target) @@ -2904,16 +3427,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. 
end + aux = aux or { } local concat, format, gmatch = table.concat, string.format, string.gmatch local tostring, type = tostring, type +local lpegmatch = lpeg.match + +local P, R, V = lpeg.P, lpeg.R, lpeg.V + +local escape, left, right = P("\\"), P('{'), P('}') + +lpeg.patterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} local space = lpeg.P(' ') local equal = lpeg.P("=") @@ -2921,7 +3456,7 @@ local comma = lpeg.P(",") local lbrace = lpeg.P("{") local rbrace = lpeg.P("}") local nobrace = 1 - (lbrace+rbrace) -local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace } +local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace } local spaces = space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) @@ -2959,13 +3494,13 @@ function aux.make_settings_to_hash_pattern(set,how) end end -function aux.settings_to_hash(str) +function aux.settings_to_hash(str,existing) if str and str ~= "" then - hash = { } + hash = existing or { } if moretolerant then - pattern_b_s:match(str) + lpegmatch(pattern_b_s,str) else - pattern_a_s:match(str) + lpegmatch(pattern_a_s,str) end return hash else @@ -2973,39 +3508,41 @@ function aux.settings_to_hash(str) end end -function aux.settings_to_hash_tolerant(str) +function aux.settings_to_hash_tolerant(str,existing) if str and str ~= "" then - hash = { } - pattern_b_s:match(str) + hash = existing or { } + lpegmatch(pattern_b_s,str) return hash else return { } end end -function aux.settings_to_hash_strict(str) +function aux.settings_to_hash_strict(str,existing) if str and str ~= "" then - hash = { } - pattern_c_s:match(str) + hash = existing or { } + lpegmatch(pattern_c_s,str) return next(hash) and hash else return nil end end -local seperator = comma * space^0 +local separator = comma * space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) -local pattern = lpeg.Ct(value*(seperator*value)^0) +local pattern = lpeg.Ct(value*(separator*value)^0) -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored aux.settings_to_array_pattern = pattern +-- we could use a weak table as cache + function aux.settings_to_array(str) if not str or str == "" then return { } else - return pattern:match(str) + return lpegmatch(pattern,str) end end @@ -3014,10 +3551,10 @@ local function set(t,v) end local value = lpeg.P(lpeg.Carg(1)*value) / set -local pattern = value*(seperator*value)^0 * lpeg.Carg(1) +local pattern = value*(separator*value)^0 * lpeg.Carg(1) function aux.add_settings_to_array(t,str) - return pattern:match(str, nil, t) + return lpegmatch(pattern,str,nil,t) end function aux.hash_to_string(h,separator,yes,no,strict,omit) @@ -3065,6 +3602,13 @@ function aux.settings_to_set(str,t) return t end +local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace +local pattern = lpeg.Ct((space + value)^0) + +function aux.arguments_to_table(str) + return lpegmatch(pattern,str) +end + -- temporary here function aux.getparameters(self,class,parentclass,settings) @@ -3073,36 +3617,31 @@ function aux.getparameters(self,class,parentclass,settings) sc = table.clone(self[parent]) self[class] = sc end - aux.add_settings_to_array(sc, settings) + aux.settings_to_hash(settings,sc) end -- temporary here -local digit = lpeg.R("09") -local period = lpeg.P(".") -local zero = lpeg.P("0") - ---~ local finish = lpeg.P(-1) ---~ local nodigit = (1-digit) + finish ---~ local case_1 = (period * zero^1 * #nodigit)/"" 
-- .000 ---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100 - +local digit = lpeg.R("09") +local period = lpeg.P(".") +local zero = lpeg.P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") - -local number = digit^1 * (case_1 + case_2) -local stripper = lpeg.Cs((number + 1)^0) +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = lpeg.Cs((number + 1)^0) --~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --~ collectgarbage("collect") --~ str = string.rep(sample,10000) --~ local ts = os.clock() ---~ stripper:match(str) ---~ print(#str, os.clock()-ts, stripper:match(sample)) +--~ lpegmatch(stripper,str) +--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +lpeg.patterns.strip_zeros = stripper function aux.strip_zeros(str) - return stripper:match(str) + return lpegmatch(stripper,str) end function aux.definetable(target) -- defines undefined tables @@ -3126,6 +3665,24 @@ function aux.accesstable(target) return t end +-- as we use this a lot ... + +--~ function aux.cachefunction(action,weak) +--~ local cache = { } +--~ if weak then +--~ setmetatable(cache, { __mode = "kv" } ) +--~ end +--~ local function reminder(str) +--~ local found = cache[str] +--~ if not found then +--~ found = action(str) +--~ cache[str] = found +--~ end +--~ return found +--~ end +--~ return reminder, cache +--~ end + end -- of closure @@ -3133,7 +3690,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-tra'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-tra.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3143,12 +3700,17 @@ if not modules then modules = { } end modules ['trac-tra'] = { -- bound to a variable, like node.new, node.copy etc (contrary to for instance -- node.has_attribute which is bound to a has_attribute local variable in mkiv) +local debug = require "debug" + +local getinfo = debug.getinfo +local type, next = type, next +local concat = table.concat +local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub + debugger = debugger or { } local counters = { } local names = { } -local getinfo = debug.getinfo -local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch -- one @@ -3187,10 +3749,10 @@ function debugger.showstats(printer,threshold) local total, grandtotal, functions = 0, 0, 0 printer("\n") -- ugly but ok -- table.sort(counters) - for func, count in pairs(counters) do + for func, count in next, counters do if count > threshold then local name = getname(func) - if not name:find("for generator") then + if not find(name,"for generator") then printer(format("%8i %s", count, name)) total = total + count end @@ -3222,7 +3784,7 @@ end --~ local total, grandtotal, functions = 0, 0, 0 --~ printer("\n") -- ugly but ok --~ -- table.sort(counters) ---~ for func, count in pairs(counters) do +--~ for func, count in next, counters do --~ if count > threshold then --~ printer(format("%8i %s", count, func)) --~ total = total + count @@ 
-3276,38 +3838,77 @@ end --~ print("") --~ debugger.showstats(print,3) -trackers = trackers or { } +setters = setters or { } +setters.data = setters.data or { } -local data, done = { }, { } +--~ local function set(t,what,value) +--~ local data, done = t.data, t.done +--~ if type(what) == "string" then +--~ what = aux.settings_to_array(what) -- inefficient but ok +--~ end +--~ for i=1,#what do +--~ local w = what[i] +--~ for d, f in next, data do +--~ if done[d] then +--~ -- prevent recursion due to wildcards +--~ elseif find(d,w) then +--~ done[d] = true +--~ for i=1,#f do +--~ f[i](value) +--~ end +--~ end +--~ end +--~ end +--~ end -local function set(what,value) +local function set(t,what,value) + local data, done = t.data, t.done if type(what) == "string" then - what = aux.settings_to_array(what) + what = aux.settings_to_hash(what) -- inefficient but ok end - for i=1,#what do - local w = what[i] + for w, v in next, what do + if v == "" then + v = value + else + v = toboolean(v) + end for d, f in next, data do if done[d] then -- prevent recursion due to wildcards elseif find(d,w) then done[d] = true for i=1,#f do - f[i](value) + f[i](v) end end end end end -local function reset() - for d, f in next, data do +local function reset(t) + for d, f in next, t.data do for i=1,#f do f[i](false) end end end -function trackers.register(what,...) +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data what = lower(what) local w = data[what] if not w then @@ -3319,32 +3920,32 @@ function trackers.register(what,...) if typ == "function" then w[#w+1] = fnc elseif typ == "string" then - w[#w+1] = function(value) set(fnc,value,nesting) end + w[#w+1] = function(value) set(t,fnc,value,nesting) end end end end -function trackers.enable(what) - done = { } - set(what,true) +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,string.simpleesc(tostring(what))) + t.enable, t.done = e, { } end -function trackers.disable(what) - done = { } - if not what or what == "" then - trackers.reset(what) - else - set(what,false) - end +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,string.simpleesc(tostring(what))) + t.disable, t.done = e, { } end -function trackers.reset(what) - done = { } - reset() +function setters.reset(t) + t.done = { } + reset(t) end -function trackers.list() -- pattern - local list = table.sortedkeys(data) +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) local user, system = { }, { } for l=1,#list do local what = list[l] @@ -3357,6 +3958,78 @@ function trackers.list() -- pattern return user, system end +function setters.show(t) + commands.writestatus("","") + local list = setters.list(t) + for k=1,#list do + commands.writestatus(t.name,list[k]) + end + commands.writestatus("","") +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module + +function setters.new(name) + local t + t = { + data = { }, + name = name, + enable = function(...) setters.enable (t,...) end, + disable = function(...) setters.disable (t,...) end, + register = function(...) setters.register(t,...) end, + list = function(...) setters.list (t,...) 
end, + show = function(...) setters.show (t,...) end, + } + setters.data[name] = t + return t +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +local e = directives.enable +local d = directives.disable + +function directives.enable(...) + commands.writestatus("directives","enabling: %s",concat({...}," ")) + e(...) +end + +function directives.disable(...) + commands.writestatus("directives","disabling: %s",concat({...}," ")) + d(...) +end + +local e = experiments.enable +local d = experiments.disable + +function experiments.enable(...) + commands.writestatus("experiments","enabling: %s",concat({...}," ")) + e(...) +end + +function experiments.disable(...) + commands.writestatus("experiments","disabling: %s",concat({...}," ")) + d(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + statistics.enable = not v +end) + + end -- of closure @@ -3364,7 +4037,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['luat-env'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3376,10 +4049,10 @@ if not modules then modules = { } end modules ['luat-env'] = { -- evolved before bytecode arrays were available and so a lot of -- code has disappeared already. -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local format = string.format +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquote, quote = string.unquote, string.quote -- precautions @@ -3413,13 +4086,14 @@ if not environment.jobname then environ function environment.initialize_arguments(arg) local arguments, files = { }, { } environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index, argument in pairs(arg) do + for index=1,#arg do + local argument = arg[index] if index > 0 then - local flag, value = argument:match("^%-+(.+)=(.-)$") + local flag, value = match(argument,"^%-+(.-)=(.-)$") if flag then - arguments[flag] = string.unquote(value or "") + arguments[flag] = unquote(value or "") else - flag = argument:match("^%-+(.+)") + flag = match(argument,"^%-+(.+)") if flag then arguments[flag] = true else @@ -3446,25 +4120,30 @@ function environment.argument(name,partial) return arguments[name] elseif partial then if not sortedflags then - sortedflags = { } - for _,v in pairs(table.sortedkeys(arguments)) do - sortedflags[#sortedflags+1] = "^" .. v + sortedflags = table.sortedkeys(arguments) + for k=1,#sortedflags do + sortedflags[k] = "^" .. 
sortedflags[k] end environment.sortedflags = sortedflags end -- example of potential clash: ^mode ^modefile - for _,v in ipairs(sortedflags) do - if name:find(v) then - return arguments[v:sub(2,#v)] + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] end end end return nil end +environment.argument("x",true) + function environment.split_arguments(separator) -- rather special, cut-off before separator local done, before, after = false, { }, { } - for _,v in ipairs(environment.original_arguments) do + local original_arguments = environment.original_arguments + for k=1,#original_arguments do + local v = original_arguments[k] if not done and v == separator then done = true elseif done then @@ -3481,16 +4160,17 @@ function environment.reconstruct_commandline(arg,noquote) if noquote and #arg == 1 then local a = arg[1] a = resolvers.resolve(a) - a = a:unquote() + a = unquote(a) return a - elseif next(arg) then + elseif #arg > 0 then local result = { } - for _,a in ipairs(arg) do -- ipairs 1 .. #n + for i=1,#arg do + local a = arg[i] a = resolvers.resolve(a) - a = a:unquote() - a = a:gsub('"','\\"') -- tricky - if a:find(" ") then - result[#result+1] = a:quote() + a = unquote(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quote(a) else result[#result+1] = a end @@ -3503,17 +4183,18 @@ end if arg then - -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later) + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) local newarg, instring = { }, false - for index, argument in ipairs(arg) do - if argument:find("^\"") then - newarg[#newarg+1] = argument:gsub("^\"","") - if not argument:find("\"$") then + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then instring = true end - elseif argument:find("\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","") + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") instring = false elseif instring then newarg[#newarg] = newarg[#newarg] .. " " .. 
argument @@ -3568,12 +4249,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in filename = file.replacesuffix(filename, "lua") local fullname = environment.luafile(filename) if fullname and fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading file %s", fullname) end return environment.loadedluacode(fullname) else - if trace_verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end return nil @@ -3593,7 +4274,7 @@ function environment.loadluafile(filename, version) -- when not overloaded by explicit suffix we look for a luc file first local fullname = (lucname and environment.luafile(lucname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check @@ -3611,7 +4292,7 @@ function environment.loadluafile(filename, version) if v == version then return true else - if trace_verbose then + if trace_locating then logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version) end environment.loadluafile(filename) @@ -3622,12 +4303,12 @@ function environment.loadluafile(filename, version) end fullname = (luaname and environment.luafile(luaname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check if not chunk then - if verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end else @@ -3645,7 +4326,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-inf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-inf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3670,6 +4351,14 @@ function statistics.hastimer(instance) return instance and instance.starttime end +function statistics.resettiming(instance) + if not instance then + notimer = { timing = 0, loadtime = 0 } + else + instance.timing, instance.loadtime = 0, 0 + end +end + function statistics.starttiming(instance) if not instance then notimer = { } @@ -3684,6 +4373,8 @@ function statistics.starttiming(instance) if not instance.loadtime then instance.loadtime = 0 end + else +--~ logs.report("system","nested timing (%s)",tostring(instance)) end instance.timing = it + 1 end @@ -3729,6 +4420,12 @@ function statistics.elapsedindeed(instance) return t > statistics.threshold end +function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds + if statistics.elapsedindeed(instance) then + return format("%s seconds %s", statistics.elapsedtime(instance),rest or "") + end +end + -- general function function statistics.register(tag,fnc) @@ -3807,14 +4504,32 @@ function statistics.timed(action,report) report("total runtime: %s",statistics.elapsedtime(timer)) end +-- where, not really the best spot for this: + +commands = commands or { } + +local timer + +function commands.resettimer() + statistics.resettiming(timer) + statistics.starttiming(timer) +end + +function commands.elapsedtime() + statistics.stoptiming(timer) + tex.sprint(statistics.elapsedtime(timer)) +end + +commands.resettimer() + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } 
end modules ['luat-log'] = { +if not modules then modules = { } end modules ['trac-log'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-log.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3822,7 +4537,11 @@ if not modules then modules = { } end modules ['luat-log'] = { -- this is old code that needs an overhaul -local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format +--~ io.stdout:setvbuf("no") +--~ io.stderr:setvbuf("no") + +local write_nl, write = texio.write_nl or print, texio.write or io.write +local format, gmatch = string.format, string.gmatch local texcount = tex and tex.count if texlua then @@ -3903,25 +4622,48 @@ function logs.tex.line(fmt,...) -- new end end +--~ function logs.tex.start_page_number() +--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +--~ if real > 0 then +--~ if user > 0 then +--~ if sub > 0 then +--~ write(format("[%s.%s.%s",real,user,sub)) +--~ else +--~ write(format("[%s.%s",real,user)) +--~ end +--~ else +--~ write(format("[%s",real)) +--~ end +--~ else +--~ write("[-") +--~ end +--~ end + +--~ function logs.tex.stop_page_number() +--~ write("]") +--~ end + +local real, user, sub + function logs.tex.start_page_number() - local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +end + +function logs.tex.stop_page_number() if real > 0 then if user > 0 then if sub > 0 then - write(format("[%s.%s.%s",real,user,sub)) + logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub) else - write(format("[%s.%s",real,user)) + logs.report("pages", "flushing realpage %s, userpage %s",real,user) end else - write(format("[%s",real)) + logs.report("pages", "flushing realpage %s",real) end else - write("[-") + logs.report("pages", "flushing page") end -end - -function logs.tex.stop_page_number() - write("]") + io.flush() end logs.tex.report_job_stat = statistics.show_job_stat @@ -4021,7 +4763,7 @@ end function logs.setprogram(_name_,_banner_,_verbose_) name, banner = _name_, _banner_ if _verbose_ then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") end logs.set_method("tex") logs.report = report -- also used in libraries @@ -4034,9 +4776,9 @@ end function logs.setverbose(what) if what then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") else - trackers.disable("resolvers.verbose") + trackers.disable("resolvers.locating") end logs.verbose = what or false end @@ -4053,7 +4795,7 @@ logs.report = logs.tex.report logs.simple = logs.tex.report function logs.reportlines(str) -- todo: - for line in str:gmatch("(.-)[\n\r]") do + for line in gmatch(str,"(.-)[\n\r]") do logs.report(line) end end @@ -4064,8 +4806,12 @@ end logs.simpleline = logs.reportline -function logs.help(message,option) +function logs.reportbanner() -- for scripts too logs.report(banner) +end + +function logs.help(message,option) + logs.reportbanner() logs.reportline() logs.reportlines(message) local moreinfo = logs.moreinfo or "" @@ -4097,6 +4843,11 @@ end --~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --~ end +function logs.fatal(where,...) 
+ logs.report(where,"fatal error: %s, aborting now",format(...)) + os.exit() +end + end -- of closure @@ -4104,10 +4855,10 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", - comment = "companion to luat-lib.tex", } -- After a few years using the code the large luat-inp.lua file @@ -4119,7 +4870,7 @@ if not modules then modules = { } end modules ['data-inp'] = { -- * some public auxiliary functions were made private -- -- TODO: os.getenv -> os.env[] --- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller) +-- TODO: instances.[hashes,cnffiles,configurations,522] -- TODO: check escaping in find etc, too much, too slow -- This lib is multi-purpose and can be loaded again later on so that @@ -4140,12 +4891,13 @@ if not modules then modules = { } end modules ['data-inp'] = { local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type = next, type +local lpegmatch = lpeg.match -local trace_locating, trace_detail, trace_verbose = false, false, false +local trace_locating, trace_detail, trace_expansions = false, false, false -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) -trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end) +trackers.register("resolvers.locating", function(v) trace_locating = v end) +trackers.register("resolvers.details", function(v) trace_detail = v end) +trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo if not resolvers then resolvers = { @@ -4169,7 +4921,7 @@ resolvers.generators.notfound = { nil } resolvers.cacheversion = '1.0.1' resolvers.cnfname = 'texmf.cnf' resolvers.luaname = 'texmfcnf.lua' -resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' +resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}' local dummy_path_expr = "^!*unset/*$" @@ -4211,8 +4963,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' } alternatives['map files'] = 'map' alternatives['enc files'] = 'enc' -alternatives['cid files'] = 'cid' -alternatives['fea files'] = 'fea' +alternatives['cid maps'] = 'cid' -- great, why no cid files +alternatives['font feature files'] = 'fea' -- and fea files here alternatives['opentype fonts'] = 'otf' alternatives['truetype fonts'] = 'ttf' alternatives['truetype collections'] = 'ttc' @@ -4228,6 +4980,11 @@ formats ['sfd'] = 'SFDFONTS' suffixes ['sfd'] = { 'sfd' } alternatives['subfont definition files'] = 'sfd' +-- lib paths + +formats ['lib'] = 'CLUAINPUTS' -- new (needs checking) +suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' } + -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. 
We used to pass the @@ -4350,8 +5107,10 @@ local function check_configuration() -- not yet ok, no time for debugging now -- bad luck end fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm - fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") - fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- this will go away some day + fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//") end @@ -4366,7 +5125,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' end end -resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE")) +resolvers.settrace(os.getenv("MTX_INPUT_TRACE")) function resolvers.osenv(key) local ie = instance.environment @@ -4454,37 +5213,43 @@ end -- work that well; the parsing is ok, but dealing with the resulting -- table is a pain because we need to work inside-out recursively +local function do_first(a,b) + local t = { } + for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end + return "{" .. concat(t,",") .. "}" +end + +local function do_second(a,b) + local t = { } + for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end + return "{" .. concat(t,",") .. "}" +end + +local function do_both(a,b) + local t = { } + for sa in gmatch(a,"[^,]+") do + for sb in gmatch(b,"[^,]+") do + t[#t+1] = sa .. sb + end + end + return "{" .. concat(t,",") .. "}" +end + +local function do_three(a,b,c) + return a .. b.. c +end + local function splitpathexpr(str, t, validate) -- no need for further optimization as it is only called a - -- few times, we can use lpeg for the sub; we could move - -- the local functions outside the body + -- few times, we can use lpeg for the sub + if trace_expansions then + logs.report("fileio","expanding variable '%s'",str) + end t = t or { } str = gsub(str,",}",",@}") str = gsub(str,"{,","{@,") -- str = "@" .. str .. "@" local ok, done - local function do_first(a,b) - local t = { } - for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end - return "{" .. concat(t,",") .. "}" - end - local function do_second(a,b) - local t = { } - for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end - return "{" .. concat(t,",") .. "}" - end - local function do_both(a,b) - local t = { } - for sa in gmatch(a,"[^,]+") do - for sb in gmatch(b,"[^,]+") do - t[#t+1] = sa .. sb - end - end - return "{" .. concat(t,",") .. "}" - end - local function do_three(a,b,c) - return a .. b.. c - end while true do done = false while true do @@ -4515,6 +5280,11 @@ local function splitpathexpr(str, t, validate) t[#t+1] = s end end + if trace_expansions then + for k=1,#t do + logs.report("fileio","% 4i: %s",k,t[k]) + end + end return t end @@ -4554,18 +5324,27 @@ end -- also we now follow the stupid route: if not set then just assume *one* -- cnf file under texmf (i.e. 
distribution) -resolvers.ownpath = resolvers.ownpath or nil -resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex" -resolvers.autoselfdir = true -- false may be handy for debugging +local args = environment and environment.original_arguments or arg -- this needs a cleanup + +resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" +resolvers.ownbin = gsub(resolvers.ownbin,"\\","/") function resolvers.getownpath() - if not resolvers.ownpath then - if resolvers.autoselfdir and os.selfdir then - resolvers.ownpath = os.selfdir - else - local binary = resolvers.ownbin - if os.platform == "windows" then - binary = file.replacesuffix(binary,"exe") + local ownpath = resolvers.ownpath or os.selfdir + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + end + local binary = resolvers.ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and file.dirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) end for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do local b = file.join(p,binary) @@ -4577,30 +5356,39 @@ function resolvers.getownpath() local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() - if trace_verbose and p ~= pp then - logs.report("fileio","following symlink %s to %s",p,pp) + if trace_locating and p ~= pp then + logs.report("fileio","following symlink '%s' to '%s'",p,pp) end - resolvers.ownpath = pp + ownpath = pp lfs.chdir(olddir) else - if trace_verbose then - logs.report("fileio","unable to check path %s",p) + if trace_locating then + logs.report("fileio","unable to check path '%s'",p) end - resolvers.ownpath = p + ownpath = p end break end end end - if not resolvers.ownpath then resolvers.ownpath = '.' end + if not ownpath or ownpath == "" then + ownpath = "." 
+ logs.report("fileio","forcing fallback ownpath .") + elseif trace_locating then + logs.report("fileio","using ownpath '%s'",ownpath) + end + end + resolvers.ownpath = ownpath + function resolvers.getownpath() + return resolvers.ownpath end - return resolvers.ownpath + return ownpath end local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" } local function identify_own() - local ownpath = resolvers.getownpath() or lfs.currentdir() + local ownpath = resolvers.getownpath() or dir.current() local ie = instance.environment if ownpath then if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end @@ -4613,10 +5401,10 @@ local function identify_own() if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end - if trace_verbose then + if trace_locating then for i=1,#own_places do local v = own_places[i] - logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown") + logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown") end end identify_own = function() end @@ -4648,10 +5436,8 @@ end local function load_cnf_file(fname) fname = resolvers.clean_path(fname) local lname = file.replacesuffix(fname,'lua') - local f = io.open(lname) - if f then -- this will go - f:close() - local dname = file.dirname(fname) + if lfs.isfile(lname) then + local dname = file.dirname(fname) -- fname ? if not instance.configuration[dname] then resolvers.load_data(dname,'configuration',lname and file.basename(lname)) instance.order[#instance.order+1] = instance.configuration[dname] @@ -4659,8 +5445,8 @@ local function load_cnf_file(fname) else f = io.open(fname) if f then - if trace_verbose then - logs.report("fileio","loading %s", fname) + if trace_locating then + logs.report("fileio","loading configuration file %s", fname) end local line, data, n, k, v local dname = file.dirname(fname) @@ -4694,14 +5480,16 @@ local function load_cnf_file(fname) end end f:close() - elseif trace_verbose then - logs.report("fileio","skipping %s", fname) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'", fname) end end end local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared) - for _,c in ipairs(instance.order) do + local order = instance.order + for i=1,#order do + local c = order[i] for k,v in next, c do if not instance.variables[k] then if instance.environment[k] then @@ -4717,19 +5505,24 @@ end function resolvers.load_cnf() local function loadoldconfigdata() - for _, fname in ipairs(instance.cnffiles) do - load_cnf_file(fname) + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + load_cnf_file(cnffiles[i]) end end -- instance.cnffiles contain complete names now ! 
+ -- we still use a funny mix of cnf and new but soon + -- we will switch to lua exclusively as we only use + -- the file to collect the tree roots if #instance.cnffiles == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)") end else - instance.rootpath = instance.cnffiles[1] - for k,fname in ipairs(instance.cnffiles) do - instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local cnffiles = instance.cnffiles + instance.rootpath = cnffiles[1] + for k=1,#cnffiles do + instance.cnffiles[k] = file.collapse_path(cnffiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -4757,8 +5550,9 @@ function resolvers.load_lua() -- yet harmless else instance.rootpath = instance.luafiles[1] - for k,fname in ipairs(instance.luafiles) do - instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local luafiles = instance.luafiles + for k=1,#luafiles do + instance.luafiles[k] = file.collapse_path(luafiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -4790,14 +5584,14 @@ end function resolvers.append_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash append: %s",tag) + logs.report("fileio","hash '%s' appended",tag) end insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } ) end function resolvers.prepend_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash prepend: %s",tag) + logs.report("fileio","hash '%s' prepended",tag) end insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } ) end @@ -4821,9 +5615,11 @@ end -- locators function resolvers.locatelists() - for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do - if trace_verbose then - logs.report("fileio","locating list of %s",path) + local texmfpaths = resolvers.clean_path_list('TEXMF') + for i=1,#texmfpaths do + local path = texmfpaths[i] + if trace_locating then + logs.report("fileio","locating list of '%s'",path) end resolvers.locatedatabase(file.collapse_path(path)) end @@ -4836,11 +5632,11 @@ end function resolvers.locators.tex(specification) if specification and specification ~= '' and lfs.isdir(specification) then if trace_locating then - logs.report("fileio",'! tex locator found: %s',specification) + logs.report("fileio","tex locator '%s' found",specification) end resolvers.append_hash('file',specification,filename) elseif trace_locating then - logs.report("fileio",'? 
tex locator not found: %s',specification) + logs.report("fileio","tex locator '%s' not found",specification) end end @@ -4854,7 +5650,9 @@ function resolvers.loadfiles() instance.loaderror = false instance.files = { } if not instance.renewcache then - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for k=1,#hashes do + local hash = hashes[k] resolvers.hashdatabase(hash.tag,hash.name) if instance.loaderror then break end end @@ -4868,8 +5666,9 @@ end -- generators: function resolvers.loadlists() - for _, hash in ipairs(instance.hashes) do - resolvers.generatedatabase(hash.tag) + local hashes = instance.hashes + for i=1,#hashes do + resolvers.generatedatabase(hashes[i].tag) end end @@ -4881,10 +5680,27 @@ end local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = lpeg.P(" ") +--~ local l_character = lpeg.patterns.utf8 +--~ local l_dangerous = lpeg.P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1) +--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false) + +--~ local function test(str) +--~ print(str,lpeg.match(l_normal,str)) +--~ end +--~ test("ヒラギノ明朝 Pro W3") +--~ test("..ヒラギノ明朝 Pro W3") +--~ test(":ヒラギノ明朝 Pro W3;") +--~ test("ヒラギノ明朝 /Pro W3;") +--~ test("ヒラギノ明朝 Pro W3") + function resolvers.generators.tex(specification) local tag = specification - if trace_verbose then - logs.report("fileio","scanning path %s",specification) + if trace_locating then + logs.report("fileio","scanning path '%s'",specification) end instance.files[tag] = { } local files = instance.files[tag] @@ -4900,7 +5716,8 @@ function resolvers.generators.tex(specification) full = spec end for name in directory(full) do - if not weird:match(name) then + if not lpegmatch(weird,name) then + -- if lpegmatch(l_normal,name) then local mode = attributes(full..name,'mode') if mode == 'file' then if path then @@ -4933,7 +5750,7 @@ function resolvers.generators.tex(specification) end end action() - if trace_verbose then + if trace_locating then logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r) end end @@ -4948,11 +5765,48 @@ end -- we join them and split them after the expansion has taken place. This -- is more convenient. 
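-- a hedged sketch of what the splitter defined below should return for an already
-- expanded specification (the separator set depends on os.type, ";" resp. ":;"):
--~ resolvers.split_kpse_path(".;/usr/share/texmf//;$TEXMF/tex//")
--~ -- { ".", "/usr/share/texmf//", "$TEXMF/tex//" }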
+--~ local checkedsplit = string.checkedsplit + +local cache = { } + +local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;"))) + +local function split_kpse_path(str) -- beware, this can be either a path or a {specification} + local found = cache[str] + if not found then + if str == "" then + found = { } + else + str = gsub(str,"\\","/") +--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator) +local split = lpegmatch(splitter,str) + found = { } + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + found[#found+1] = s + end + end + if trace_expansions then + logs.report("fileio","splitting path specification '%s'",str) + for k=1,#found do + logs.report("fileio","% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found +end + +resolvers.split_kpse_path = split_kpse_path + function resolvers.splitconfig() - for i,c in ipairs(instance) do - for k,v in pairs(c) do + for i=1,#instance do + local c = instance[i] + for k,v in next, c do if type(v) == 'string' then - local t = file.split_path(v) + local t = split_kpse_path(v) if #t > 1 then c[k] = t end @@ -4962,21 +5816,25 @@ function resolvers.splitconfig() end function resolvers.joinconfig() - for i,c in ipairs(instance.order) do - for k,v in pairs(c) do -- ipairs? + local order = instance.order + for i=1,#order do + local c = order[i] + for k,v in next, c do -- indexed? if type(v) == 'table' then c[k] = file.join_path(v) end end end end + function resolvers.split_path(str) if type(str) == 'table' then return str else - return file.split_path(str) + return split_kpse_path(str) end end + function resolvers.join_path(str) if type(str) == 'table' then return file.join_path(str) @@ -4988,8 +5846,9 @@ end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h = { }, { } - for _,vv in ipairs(file.split_path(v)) do + local t, h, p = { }, { }, split_kpse_path(v) + for kk=1,#p do + local vv = p[kk] if vv ~= "" and not h[vv] then t[#t+1] = vv h[vv] = true @@ -5036,11 +5895,15 @@ function resolvers.serialize(files) end t[#t+1] = "return {" if instance.sortdata then - for _, k in pairs(sortedkeys(files)) do -- ipairs + local sortedfiles = sortedkeys(files) + for i=1,#sortedfiles do + local k = sortedfiles[i] local fk = files[k] if type(fk) == 'table' then t[#t+1] = "\t['" .. k .. "']={" - for _, kk in pairs(sortedkeys(fk)) do -- ipairs + local sortedfk = sortedkeys(fk) + for j=1,#sortedfk do + local kk = sortedfk[j] t[#t+1] = dump(kk,fk[kk],"\t\t") end t[#t+1] = "\t}," @@ -5065,12 +5928,18 @@ function resolvers.serialize(files) return concat(t,"\n") end +local data_state = { } + +function resolvers.data_state() + return data_state or { } +end + function resolvers.save_data(dataname, makename) -- untested without cache overload for cachename, files in next, instance[dataname] do local name = (makename or file.join)(cachename,dataname) local luaname, lucname = name .. ".lua", name .. 
".luc" - if trace_verbose then - logs.report("fileio","preparing %s for %s",dataname,cachename) + if trace_locating then + logs.report("fileio","preparing '%s' for '%s'",dataname,cachename) end for k, v in next, files do if type(v) == "table" and #v == 1 then @@ -5084,24 +5953,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = files, + uuid = os.uuid(), } local ok = io.savedata(luaname,resolvers.serialize(data)) if ok then - if trace_verbose then - logs.report("fileio","%s saved in %s",dataname,luaname) + if trace_locating then + logs.report("fileio","'%s' saved in '%s'",dataname,luaname) end if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip - if trace_verbose then - logs.report("fileio","%s compiled to %s",dataname,lucname) + if trace_locating then + logs.report("fileio","'%s' compiled to '%s'",dataname,lucname) end else - if trace_verbose then - logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname) + if trace_locating then + logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end - elseif trace_verbose then - logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname) + elseif trace_locating then + logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname) end end end @@ -5113,19 +5983,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi if blob then local data = blob() if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then - if trace_verbose then - logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename) + data_state[#data_state+1] = data.uuid + if trace_locating then + logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = data.content else - if trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + if trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + elseif trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end end @@ -5144,15 +6015,17 @@ function resolvers.resetconfig() end function resolvers.loadnewconfig() - for _, cnf in ipairs(instance.luafiles) do + local luafiles = instance.luafiles + for i=1,#luafiles do + local cnf = luafiles[i] local pathname = file.dirname(cnf) local filename = file.join(pathname,resolvers.luaname) local blob = loadfile(filename) if blob then local data = blob() if data then - if trace_verbose then - logs.report("fileio","loading configuration file %s",filename) + if trace_locating then + logs.report("fileio","loading configuration file '%s'",filename) end if true then -- flatten to variable.progname @@ -5173,14 +6046,14 @@ function resolvers.loadnewconfig() instance['setup'][pathname] = data end else - if trace_verbose then - logs.report("fileio","skipping configuration file %s",filename) + if trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance['setup'][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping 
configuration file %s",filename) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance.order[#instance.order+1] = instance.setup[pathname] if instance.loaderror then break end @@ -5189,7 +6062,9 @@ end function resolvers.loadoldconfig() if not instance.renewcache then - for _, cnf in ipairs(instance.cnffiles) do + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + local cnf = cnffiles[i] local dname = file.dirname(cnf) resolvers.load_data(dname,'configuration') instance.order[#instance.order+1] = instance.configuration[dname] @@ -5379,7 +6254,7 @@ end function resolvers.expanded_path_list(str) if not str then - return ep or { } + return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") @@ -5397,9 +6272,9 @@ end function resolvers.expanded_path_list_from_var(str) -- brrr local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(str) - else return resolvers.expanded_path_list(tmp) + else + return resolvers.expanded_path_list(str) end end @@ -5446,9 +6321,9 @@ function resolvers.isreadable.file(name) local readable = lfs.isfile(name) -- brrr if trace_detail then if readable then - logs.report("fileio","+ readable: %s",name) + logs.report("fileio","file '%s' is readable",name) else - logs.report("fileio","- readable: %s", name) + logs.report("fileio","file '%s' is not readable", name) end end return readable @@ -5464,7 +6339,7 @@ local function collect_files(names) for k=1,#names do local fname = names[k] if trace_detail then - logs.report("fileio","? blobpath asked: %s",fname) + logs.report("fileio","checking name '%s'",fname) end local bname = file.basename(fname) local dname = file.dirname(fname) @@ -5480,7 +6355,7 @@ local function collect_files(names) local files = blobpath and instance.files[blobpath] if files then if trace_detail then - logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname) + logs.report("fileio","deep checking '%s' (%s)",blobpath,bname) end local blobfile = files[bname] if not blobfile then @@ -5514,7 +6389,7 @@ local function collect_files(names) end end elseif trace_locating then - logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname) + logs.report("fileio","no match in '%s' (%s)",blobpath,bname) end end end @@ -5564,14 +6439,13 @@ end local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc) local result = collected or { } local stamp = nil - filename = file.collapse_path(filename) -- elsewhere - filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere + filename = file.collapse_path(filename) -- speed up / beware: format problem if instance.remember then stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format if instance.found[stamp] then if trace_locating then - logs.report("fileio",'! 
remembered: %s',filename) + logs.report("fileio","remembering file '%s'",filename) end return instance.found[stamp] end @@ -5579,7 +6453,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not dangerous[instance.format or "?"] then if resolvers.isreadable.file(filename) then if trace_detail then - logs.report("fileio",'= found directly: %s',filename) + logs.report("fileio","file '%s' found directly",filename) end instance.found[stamp] = { filename } return { filename } @@ -5587,13 +6461,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end if find(filename,'%*') then if trace_locating then - logs.report("fileio",'! wildcard: %s', filename) + logs.report("fileio","checking wildcard '%s'", filename) end result = resolvers.find_wildcard_files(filename) elseif file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then - logs.report("fileio",'! qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end result = { filename } else @@ -5603,7 +6477,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. ".tex" if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing standard filetype: tex') + logs.report("fileio","no suffix, forcing standard filetype 'tex'") end result, ok = { forcedname }, true end @@ -5613,7 +6487,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. "." .. s if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing format filetype: %s', s) + logs.report("fileio","no suffix, forcing format filetype '%s'", s) end result, ok = { forcedname }, true break @@ -5625,7 +6499,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- try to find in tree (no suffix manipulation), here we search for the -- matching last part of the name local basename = file.basename(filename) - local pattern = (filename .. "$"):gsub("([%.%-])","%%%1") + local pattern = gsub(filename .. 
"$","([%.%-])","%%%1") local savedformat = instance.format local format = savedformat or "" if format == "" then @@ -5635,19 +6509,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan instance.format = "othertextfiles" -- kind of everything, maybe texinput is better end -- - local resolved = collect_instance_files(basename) - if #result == 0 then - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered) + if basename ~= filename then + local resolved = collect_instance_files(basename) + if #result == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered) + end end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if rr:find(pattern) then - result[#result+1], ok = rr, true + resolvers.format = savedformat + -- + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1], ok = rr, true + end end end -- a real wildcard: @@ -5656,14 +6532,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- local filelist = collect_files({basename}) -- for f=1,#filelist do -- local ff = filelist[f][3] or "" - -- if ff:find(pattern) then + -- if find(ff,pattern) then -- result[#result+1], ok = ff, true -- end -- end -- end end if not ok and trace_locating then - logs.report("fileio",'? qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end end else @@ -5682,12 +6558,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan wantedfiles[#wantedfiles+1] = forcedname filetype = resolvers.format_of_suffix(forcedname) if trace_locating then - logs.report("fileio",'! forcing filetype: %s',filetype) + logs.report("fileio","forcing filetype '%s'",filetype) end else filetype = resolvers.format_of_suffix(filename) if trace_locating then - logs.report("fileio",'! using suffix based filetype: %s',filetype) + logs.report("fileio","using suffix based filetype '%s'",filetype) end end else @@ -5699,7 +6575,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end filetype = instance.format if trace_locating then - logs.report("fileio",'! using given filetype: %s',filetype) + logs.report("fileio","using given filetype '%s'",filetype) end end local typespec = resolvers.variable_of_format(filetype) @@ -5707,9 +6583,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then - logs.report("fileio",'? filename: %s',filename) - logs.report("fileio",'? filetype: %s',filetype or '?') - logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | ")) + logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end for k=1,#wantedfiles do local fname = wantedfiles[k] @@ -5730,36 +6604,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan else -- list search local filelist = collect_files(wantedfiles) - local doscan, recurse + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = file.dirname(filelist[i][2]) .. "/" + end + end if trace_detail then - logs.report("fileio",'? filename: %s',filename) + logs.report("fileio","checking filename '%s'",filename) end -- a bit messy ... 
esp the doscan setting here + local doscan for k=1,#pathlist do local path = pathlist[k] if find(path,"^!!") then doscan = false else doscan = true end - if find(path,"//$") then recurse = true else recurse = false end local pathname = gsub(path,"^!+", '') done = false -- using file list - if filelist and not (done and not instance.allresults) and recurse then - -- compare list entries with permitted pattern - pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences - pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname - pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless - local expr = "^" .. pathname + if filelist then + local expression + -- compare list entries with permitted pattern -- /xx /xx// + if not find(pathname,"/$") then + expression = pathname .. "/" + else + expression = pathname + end + expression = gsub(expression,"([%-%.])","%%%1") -- this also influences + expression = gsub(expression,"//+$", '/.*') -- later usage of pathname + expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless + expression = "^" .. expression .. "$" + if trace_detail then + logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname) + end for k=1,#filelist do local fl = filelist[k] local f = fl[2] - if find(f,expr) then - if trace_detail then - logs.report("fileio",'= found in hash: %s',f) - end + local d = dirlist[k] + if find(d,expression) then --- todo, test for readable result[#result+1] = fl[3] resolvers.register_in_trees(f) -- for tracing used files done = true - if not instance.allresults then break end + if instance.allresults then + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d) + end + else + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d) + end + break + end + elseif trace_detail then + logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d) end end end @@ -5775,7 +6672,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local fname = file.join(ppname,w) if resolvers.isreadable.file(fname) then if trace_detail then - logs.report("fileio",'= found by scanning: %s',fname) + logs.report("fileio","found '%s' by scanning",fname) end result[#result+1] = fname done = true @@ -5838,7 +6735,7 @@ function resolvers.find_given_files(filename) local hashes = instance.hashes for k=1,#hashes do local hash = hashes[k] - local files = instance.files[hash.tag] + local files = instance.files[hash.tag] or { } local blist = files[bname] if not blist then local rname = "remap:"..bname @@ -5948,9 +6845,9 @@ function resolvers.load(option) statistics.starttiming(instance) resolvers.resetconfig() resolvers.identify_cnf() - resolvers.load_lua() + resolvers.load_lua() -- will become the new method resolvers.expand_variables() - resolvers.load_cnf() + resolvers.load_cnf() -- will be skipped when we have a lua file resolvers.expand_variables() if option ~= "nofiles" then resolvers.load_hash() @@ -5962,22 +6859,23 @@ end function resolvers.for_files(command, files, filetype, mustexist) if files and #files > 0 then local function report(str) - if trace_verbose then + if trace_locating then logs.report("fileio",str) -- has already verbose else print(str) end end - if trace_verbose then - report('') + if trace_locating then + report('') -- ? 
end - for _, file in ipairs(files) do + for f=1,#files do + local file = files[f] local result = command(file,filetype,mustexist) if type(result) == 'string' then report(result) else - for _,v in ipairs(result) do - report(v) + for i=1,#result do + report(result[i]) -- could be unpack end end end @@ -6024,18 +6922,19 @@ end function table.sequenced(t,sep) -- temp here local s = { } - for k, v in pairs(t) do -- pairs? - s[#s+1] = k .. "=" .. v + for k, v in next, t do -- indexed? + s[#s+1] = k .. "=" .. tostring(v) end return concat(s, sep or " | ") end function resolvers.methodhandler(what, filename, filetype) -- ... + filename = file.collapse_path(filename) local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb local scheme = specification.scheme if resolvers[what][scheme] then if trace_locating then - logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification)) + logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolvers[what][scheme](filename,filetype) -- todo: specification else @@ -6055,8 +6954,9 @@ function resolvers.clean_path(str) end function resolvers.do_with_path(name,func) - for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs? - func("^"..resolvers.clean_path(v)) + local pathlist = resolvers.expanded_path_list(name) + for i=1,#pathlist do + func("^"..resolvers.clean_path(pathlist[i])) end end @@ -6065,7 +6965,9 @@ function resolvers.do_with_var(name,func) end function resolvers.with_files(pattern,handle) - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for i=1,#hashes do + local hash = hashes[i] local blobpath = hash.tag local blobtype = hash.type if blobpath then @@ -6080,7 +6982,7 @@ function resolvers.with_files(pattern,handle) if type(v) == "string" then handle(blobtype,blobpath,v,k) else - for _,vv in pairs(v) do -- ipairs? + for _,vv in next, v do -- indexed handle(blobtype,blobpath,vv,k) end end @@ -6092,7 +6994,7 @@ function resolvers.with_files(pattern,handle) end function resolvers.locate_format(name) - local barename, fmtname = name:gsub("%.%a+$",""), "" + local barename, fmtname = gsub(name,"%.%a+$",""), "" if resolvers.usecache then local path = file.join(caches.setpath("formats")) -- maybe platform fmtname = file.join(path,barename..".fmt") or "" @@ -6140,7 +7042,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6164,7 +7066,7 @@ luatools with a recache feature.

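-- [editorial sketch, not part of the patch] Both the resolvers and the caches in this
-- diff store their data as plain Lua files that simply 'return { ... }', so reloading
-- is a loadfile() plus a version check. The stand-alone illustration below shows only
-- that convention; the filenames, the 'version' field and the helper names are
-- assumptions and not the caches/resolvers API used by the patch.

local function save_table(filename,data)
    local f = io.open(filename,"w")
    if not f then return false end
    f:write("return {\n")
    f:write(string.format("  version = %q,\n",data.version or "1.001"))
    f:write("  content = {\n")
    for k, v in pairs(data.content or { }) do
        f:write(string.format("    [%q] = %q,\n",tostring(k),tostring(v)))
    end
    f:write("  },\n}\n")
    f:close()
    return true
end

local function load_table(filename,expectedversion)
    local chunk = loadfile(filename)               -- the file is a Lua program returning a table
    local data  = chunk and chunk()
    if data and data.version == expectedversion then
        return data.content
    end
    return nil                                     -- stale or invalid: the caller regenerates
end

save_table("demo-cache.lua", { version = "1.001", content = { texmf = "/usr/share/texmf" } })
print(load_table("demo-cache.lua","1.001").texmf)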
local format, lower, gsub = string.format, string.lower, string.gsub -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet caches = caches or { } @@ -6251,7 +7153,8 @@ function caches.setpath(...) caches.path = '.' end caches.path = resolvers.clean_path(caches.path) - if not table.is_empty({...}) then + local dirs = { ... } + if #dirs > 0 then local pth = dir.mkdirs(caches.path,...) return pth end @@ -6297,6 +7200,7 @@ function caches.savedata(filepath,filename,data,raw) if raw then reduce, simplify = false, false end + data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex else @@ -6322,7 +7226,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6343,7 +7247,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-out'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6359,7 +7263,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6370,8 +7274,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) --[[ldx--

Once we found ourselves defining similar cache constructs @@ -6435,7 +7337,7 @@ end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] - return storage and not table.is_empty(storage) and storage.cache_version == container.version + return storage and storage.cache_version == container.version else return false end @@ -6487,16 +7389,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, lower, gsub = string.format, string.lower, string.gsub +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -- since we want to use the cache instead of the tree, we will now -- reimplement the saver. @@ -6540,19 +7441,20 @@ resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) - if table.is_empty(mountpaths) and usecache then + if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = { caches.setpath("mount") } end - if not table.is_empty(mountpaths) then + if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) - for k, root in pairs(mountpaths) do + for k=1,#mountpaths do + local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then - if line:find("^[%%#%-]") then -- or %W + if find(line,"^[%%#%-]") then -- or %W -- skip - elseif line:find("^zip://") then + elseif find(line,"^zip://") then if trace_locating then logs.report("fileio","mounting %s",line) end @@ -6597,11 +7499,13 @@ function statistics.check_fmt_status(texname) local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") - if luv.enginebanner and luv.enginebanner ~= enginebanner then - return "engine mismatch" + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner) end - if luv.sourcehash and luv.sourcehash ~= sourcehash then - return "source mismatch" + local luvhash = luv.sourcehash or "?" 
+ if luvhash ~= sourcehash then + return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash) end else return "invalid status file" @@ -6727,7 +7631,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6735,47 +7639,47 @@ if not modules then modules = { } end modules ['data-aux'] = { local find = string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") local oldscript = resolvers.clean_path(oldname) - if trace_verbose then + if trace_locating then logs.report("fileio","to be replaced old script %s", oldscript) end local newscripts = resolvers.find_files(newname) or { } if #newscripts == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","unable to locate new script") end else for i=1,#newscripts do local newscript = resolvers.clean_path(newscripts[i]) - if trace_verbose then + if trace_locating then logs.report("fileio","checking new script %s", newscript) end if oldscript == newscript then - if trace_verbose then + if trace_locating then logs.report("fileio","old and new script are the same") end elseif not find(newscript,scriptpath) then - if trace_verbose then + if trace_locating then logs.report("fileio","new script should come from %s",scriptpath) end elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_verbose then + if trace_locating then logs.report("fileio","invalid new script name") end else local newdata = io.loaddata(newscript) if newdata then - if trace_verbose then + if trace_locating then logs.report("fileio","old script content replaced by new content") end io.savedata(oldscript,newdata) break - elseif trace_verbose then + elseif trace_locating then logs.report("fileio","unable to load new script") end end @@ -6790,7 +7694,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-lst'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6814,7 +7718,9 @@ local function list(list,report) local instance = resolvers.instance local pat = upper(pattern or "","") local report = report or texio.write_nl - for _,key in pairs(table.sortedkeys(list)) do + local sorted = table.sortedkeys(list) + for i=1,#sorted do + local key = sorted[i] if instance.pattern == "" or find(upper(key),pat) then if instance.kpseonly then if instance.kpsevars[key] then @@ -6833,11 +7739,14 @@ function resolvers.listers.expansions() list(resolvers.instance.expansions) end function resolvers.listers.configurations(report) local report = report or texio.write_nl local instance = resolvers.instance - for _,key in ipairs(table.sortedkeys(instance.kpsevars)) do + local sorted = table.sortedkeys(instance.kpsevars) + for 
i=1,#sorted do + local key = sorted[i] if not instance.pattern or (instance.pattern=="") or find(key,instance.pattern) then report(format("%s\n",key)) - for i,c in ipairs(instance.order) do - local str = c[key] + local order = instance.order + for i=1,#order do + local str = order[i][key] if str then report(format("\t%s\t%s",i,str)) end @@ -6943,7 +7852,7 @@ if not resolvers then os.exit() end -logs.setprogram('LuaTools',"TDS Management Tool 1.31",environment.arguments["verbose"] or false) +logs.setprogram('LuaTools',"TDS Management Tool 1.32",environment.arguments["verbose"] or false) local instance = resolvers.reset() @@ -7000,6 +7909,12 @@ end if environment.arguments["trace"] then resolvers.settrace(environment.arguments["trace"]) end +local trackspec = environment.argument("trackers") or environment.argument("track") + +if trackspec then + trackers.enable(trackspec) +end + runners = runners or { } messages = messages or { } @@ -7033,6 +7948,7 @@ messages.help = [[ --engine=str target engine --progname=str format or backend --pattern=str filter variables +--trackers=list enable given trackers ]] function runners.make_format(texname) @@ -7091,8 +8007,9 @@ function runners.make_format(texname) logs.simple("using uncompiled initialization file: %s",luaname) end else - for _, v in pairs({instance.luaname, instance.progname, barename}) do - v = string.gsub(v..".lua","%.lua%.lua$",".lua") + local what = { instance.luaname, instance.progname, barename } + for k=1,#what do + local v = string.gsub(what[k]..".lua","%.lua%.lua$",".lua") if v and (v ~= "") then luaname = resolvers.find_files(v)[1] or "" if luaname ~= "" then @@ -7116,7 +8033,8 @@ function runners.make_format(texname) logs.simple("using lua initialization file: %s",luaname) local mp = dir.glob(file.removesuffix(file.basename(luaname)).."-*.mem") if mp and #mp > 0 then - for _, name in ipairs(mp) do + for i=1,#mp do + local name = mp[i] logs.simple("removing related mplib format %s", file.basename(name)) os.remove(name) end diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/makempy.bat b/Master/texmf-dist/scripts/context/stubs/mswin/makempy.bat deleted file mode 100755 index d02bbf7ca7e..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/makempy.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute makempy.pl %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/metatex.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/metatex.cmd deleted file mode 100644 index 858f28f8fd5..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/metatex.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --script metatex %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/metatex.exe b/Master/texmf-dist/scripts/context/stubs/mswin/metatex.exe new file mode 100644 index 00000000000..2d45f27494d Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/metatex.exe differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mpstools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/mpstools.bat deleted file mode 100755 index 8fa0c9639c2..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/mpstools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute mpstools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.bat 
b/Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.bat deleted file mode 100755 index 3cab9669f7e..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/mptopdf.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute mptopdf.pl %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.cmd deleted file mode 100755 index 858bab87fed..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll new file mode 100644 index 00000000000..23e476cac49 Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.dll differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe new file mode 100644 index 00000000000..745eaf22464 Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.exe differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua index 82d1edecbc5..b99327692d7 100644 --- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua +++ b/Master/texmf-dist/scripts/context/stubs/mswin/mtxrun.lua @@ -48,13 +48,16 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-string'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep +local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch = lpeg.match + +-- some functions may disappear as they are not used anywhere if not string.split then @@ -94,8 +97,16 @@ function string:unquote() return (gsub(self,"^([\"\'])(.*)%1$","%2")) end +--~ function string:unquote() +--~ if find(self,"^[\'\"]") then +--~ return sub(self,2,-2) +--~ else +--~ return self +--~ end +--~ end + function string:quote() -- we could use format("%q") - return '"' .. self:unquote() .. 
'"' + return format("%q",self) end function string:count(pattern) -- variant 3 @@ -115,12 +126,23 @@ function string:limit(n,sentinel) end end -function string:strip() - return (gsub(self,"^%s*(.-)%s*$", "%1")) +--~ function string:strip() -- the .- is quite efficient +--~ -- return match(self,"^%s*(.-)%s*$") or "" +--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list +--~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)') +--~ end + +do -- roberto's variant: + local space = lpeg.S(" \t\v\n") + local nospace = 1 - space + local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0) + function string.strip(str) + return lpegmatch(stripper,str) or "" + end end function string:is_empty() - return not find(find,"%S") + return not find(self,"%S") end function string:enhance(pattern,action) @@ -154,14 +176,14 @@ if not string.characters then local function nextchar(str, index) index = index + 1 - return (index <= #str) and index or nil, str:sub(index,index) + return (index <= #str) and index or nil, sub(str,index,index) end function string:characters() return nextchar, self, 0 end local function nextbyte(str, index) index = index + 1 - return (index <= #str) and index or nil, byte(str:sub(index,index)) + return (index <= #str) and index or nil, byte(sub(str,index,index)) end function string:bytes() return nextbyte, self, 0 @@ -174,7 +196,7 @@ end function string:rpadd(n,chr) local m = n-#self if m > 0 then - return self .. self.rep(chr or " ",m) + return self .. rep(chr or " ",m) else return self end @@ -183,7 +205,7 @@ end function string:lpadd(n,chr) local m = n-#self if m > 0 then - return self.rep(chr or " ",m) .. self + return rep(chr or " ",m) .. self else return self end @@ -231,6 +253,17 @@ function string:pattesc() return (gsub(self,".",patterns_escapes)) end +local simple_escapes = { + ["-"] = "%-", + ["."] = "%.", + ["?"] = ".", + ["*"] = ".*", +} + +function string:simpleesc() + return (gsub(self,".",simple_escapes)) +end + function string:tohash() local t = { } for s in gmatch(self,"([^, ]+)") do -- lpeg @@ -242,10 +275,10 @@ end local pattern = lpeg.Ct(lpeg.C(1)^0) function string:totable() - return pattern:match(self) + return lpegmatch(pattern,self) end ---~ for _, str in ipairs { +--~ local t = { --~ "1234567123456712345671234567", --~ "a\tb\tc", --~ "aa\tbb\tcc", @@ -253,7 +286,10 @@ end --~ "aaaa\tbbbb\tcccc", --~ "aaaaa\tbbbbb\tccccc", --~ "aaaaaa\tbbbbbb\tcccccc", ---~ } do print(string.tabtospace(str)) end +--~ } +--~ for k,v do +--~ print(string.tabtospace(t[k])) +--~ end function string.tabtospace(str,tab) -- we don't handle embedded newlines @@ -261,7 +297,7 @@ function string.tabtospace(str,tab) local s = find(str,"\t") if s then if not tab then tab = 7 end -- only when found - local d = tab-(s-1)%tab + local d = tab-(s-1) % tab if d > 0 then str = gsub(str,"\t",rep(" ",d),1) else @@ -280,6 +316,25 @@ function string:compactlong() -- strips newlines and leading spaces return self end +function string:striplong() -- strips newlines and leading spaces + self = gsub(self,"^%s*","") + self = gsub(self,"[\n\r]+ *","\n") + return self +end + +function string:topattern(lowercase,strict) + if lowercase then + self = lower(self) + end + self = gsub(self,".",simple_escapes) + if self == "" then + self = ".*" + elseif strict then + self = "^" .. self .. 
"$" + end + return self +end + end -- of closure @@ -287,58 +342,64 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-lpeg'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc - ---~ l-lpeg.lua : - ---~ lpeg.digit = lpeg.R('09')^1 ---~ lpeg.sign = lpeg.S('+-')^1 ---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1) ---~ lpeg.number = lpeg.float + lpeg.integer ---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1 ---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1 ---~ lpeg.uppercase = lpeg.P("AZ") ---~ lpeg.lowercase = lpeg.P("az") - ---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed ---~ lpeg.space = lpeg.S(' ')^1 ---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1 ---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1 ---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1 - -local hash = { } +local lpeg = require("lpeg") + +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V +local match = lpeg.match + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local utf8byte = R("\128\191") + +patterns.utf8byte = utf8byte +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8byte +patterns.utf8three = R("\224\239") * utf8byte * utf8byte +patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte + +patterns.digit = digit +patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.float = sign^0 * digit^0 * P('.') * digit^1 +patterns.number = patterns.float + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = S(" ") +patterns.eol = S("\n\r") +patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) +patterns.newline = crlf + cr + lf +patterns.nonspace = 1 - patterns.space +patterns.nonspacer = 1 - patterns.spacer +patterns.whitespace = patterns.eol + patterns.spacer +patterns.nonwhitespace = 1 - patterns.whitespace +patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * lpeg.V(1) } -end - -function lpeg.startswith(pattern) --slightly adapted - return P(pattern) + return P { P(pattern) + 1 * V(1) } -- why so complex? 
end function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end --- variant: - ---~ local parser = lpeg.Ct(lpeg.splitat(newline)) - -local crlf = P("\r\n") -local cr = P("\r") -local lf = P("\n") -local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -local newline = crlf + cr + lf -local spacing = space^0 * newline - +local spacing = patterns.spacer^0 * patterns.newline -- sort of strip local empty = spacing * Cc("") local nonempty = Cs((1-spacing)^1) * spacing^-1 local content = (empty + nonempty)^1 @@ -346,15 +407,15 @@ local content = (empty + nonempty)^1 local capture = Ct(content^0) function string:splitlines() - return capture:match(self) + return match(capture,self) end -lpeg.linebyline = content -- better make a sublibrary +patterns.textline = content ---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more ---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more ---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps ---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps local splitters_s, splitters_m = { }, { } @@ -364,7 +425,7 @@ local function splitat(separator,single) separator = P(separator) if single then local other, any = C((1 - separator)^0), P(1) - splitter = other * (separator * C(any^0) + "") + splitter = other * (separator * C(any^0) + "") -- ? splitters_s[separator] = splitter else local other = C((1 - separator)^0) @@ -379,15 +440,72 @@ lpeg.splitat = splitat local cache = { } +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = Ct(splitat(separator)) + cache[separator] = c + end + return match(c,str) +end + function string:split(separator) local c = cache[separator] if not c then c = Ct(splitat(separator)) cache[separator] = c end - return c:match(self) + return match(c,self) +end + +lpeg.splitters = cache + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,str) +end + +function string:checkedsplit(separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,self) end +--~ function lpeg.append(list,pp) +--~ local p = pp +--~ for l=1,#list do +--~ if p then +--~ p = p + P(list[l]) +--~ else +--~ p = P(list[l]) +--~ end +--~ end +--~ return p +--~ end + +--~ from roberto's site: + +local f1 = string.byte + +local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + end -- of closure @@ -395,7 +513,7 @@ do -- create closure to overcome 200 
locals limit if not modules then modules = { } end modules ['l-table'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -404,9 +522,58 @@ if not modules then modules = { } end modules ['l-table'] = { table.join = table.concat local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump +local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match local getmetatable, setmetatable = getmetatable, setmetatable -local type, next, tostring, ipairs = type, next, tostring, ipairs +local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs + +-- Starting with version 5.2 Lua no longer provide ipairs, which makes +-- sense. As we already used the for loop and # in most places the +-- impact on ConTeXt was not that large; the remaining ipairs already +-- have been replaced. In a similar fashio we also hardly used pairs. +-- +-- Just in case, we provide the fallbacks as discussed in Programming +-- in Lua (http://www.lua.org/pil/7.3.html): + +if not ipairs then + + -- for k, v in ipairs(t) do ... end + -- for k=1,#t do local v = t[k] ... end + + local function iterate(a,i) + i = i + 1 + local v = a[i] + if v ~= nil then + return i, v --, nil + end + end + + function ipairs(a) + return iterate, a, 0 + end + +end + +if not pairs then + + -- for k, v in pairs(t) do ... end + -- for k, v in next, t do ... end + + function pairs(t) + return next, t -- , nil + end + +end + +-- Also, unpack has been moved to the table table, and for compatiility +-- reasons we provide both now. 
+ +if not table.unpack then + table.unpack = _G.unpack +elseif not unpack then + _G.unpack = table.unpack +end + +-- extra functions, some might go (when not used) function table.strip(tab) local lst = { } @@ -421,6 +588,14 @@ function table.strip(tab) return lst end +function table.keys(t) + local k = { } + for key, _ in next, t do + k[#k+1] = key + end + return k +end + local function compare(a,b) return (tostring(a) < tostring(b)) end @@ -464,7 +639,7 @@ end table.sortedkeys = sortedkeys table.sortedhashkeys = sortedhashkeys -function table.sortedpairs(t) +function table.sortedhash(t) local s = sortedhashkeys(t) -- maybe just sortedkeys local n = 0 local function kv(s) @@ -475,6 +650,8 @@ function table.sortedpairs(t) return kv, s end +table.sortedpairs = table.sortedhash + function table.append(t, list) for _,v in next, list do insert(t,v) @@ -583,7 +760,7 @@ end table.fastcopy = fastcopy table.copy = copy --- rougly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) +-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) function table.sub(t,i,j) return { unpack(t,i,j) } @@ -597,18 +774,18 @@ end -- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) -function table.is_empty(t) +function table.is_empty(t) -- obolete, use inline code instead return not t or not next(t) end -function table.one_entry(t) +function table.one_entry(t) -- obolete, use inline code instead local n = next(t) return n and not next(t,n) end -function table.starts_at(t) - return ipairs(t,1)(t,0) -end +--~ function table.starts_at(t) -- obsolete, not nice anyway +--~ return ipairs(t,1)(t,0) +--~ end function table.tohash(t,value) local h = { } @@ -686,6 +863,8 @@ end -- -- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) +-- problem: there no good number_to_string converter with the best resolution + local function do_serialize(root,name,depth,level,indexed) if level > 0 then depth = depth .. " " @@ -708,8 +887,9 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s{",depth)) end end + -- we could check for k (index) being number (cardinal) if root and next(root) then - local first, last = nil, 0 -- #root cannot be trusted here + local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) if compact then -- NOT: for k=1,#root do (we need to quit at nil) for k,v in ipairs(root) do -- can we use next? 
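-- [editorial sketch, not part of the patch] The change that recurs throughout this
-- diff: ipairs() loops over array-like tables are rewritten as plain numeric loops,
-- which keeps the code working once ipairs goes away (Lua 5.2, unless the fallback
-- above is installed) and avoids an iterator call per element. Table and variable
-- names below are illustrative only.

local files = { "texmf.cnf", "texmfcnf.lua", "context.cnf" }

-- old style, as removed by the patch:
-- for _, name in ipairs(files) do print(name) end

-- new style, as introduced by the patch:
for i=1,#files do
    local name = files[i]
    print(i,name)
end

-- hash-style loops are similarly rewritten from pairs(t) to the raw next iterator:
local t = { engine = "luatex", progname = "context" }
for k, v in next, t do
    print(k,v)
end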
@@ -730,10 +910,10 @@ local function do_serialize(root,name,depth,level,indexed) if hexify then handle(format("%s 0x%04X,",depth,v)) else - handle(format("%s %s,",depth,v)) + handle(format("%s %s,",depth,v)) -- %.99g end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) @@ -770,29 +950,29 @@ local function do_serialize(root,name,depth,level,indexed) --~ if hexify then --~ handle(format("%s %s=0x%04X,",depth,key(k),v)) --~ else - --~ handle(format("%s %s=%s,",depth,key(k),v)) + --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g --~ end if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) else - handle(format("%s [%s]=%s,",depth,k,v)) + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g end elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then if hexify then handle(format("%s %s=0x%04X,",depth,k,v)) else - handle(format("%s %s=%s,",depth,k,v)) + handle(format("%s %s=%s,",depth,k,v)) -- %.99g end else if hexify then handle(format("%s [%q]=0x%04X,",depth,k,v)) else - handle(format("%s [%q]=%s,",depth,k,v)) + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then --~ handle(format("%s %s=%s,",depth,key(k),v)) if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then @@ -1001,7 +1181,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify) end end -local function flatten(t,f,complete) +local function flatten(t,f,complete) -- is this used? meybe a variant with next, ... for i=1,#t do local v = t[i] if type(v) == "table" then @@ -1030,6 +1210,24 @@ end table.flatten_one_level = table.unnest +-- a better one: + +local function flattened(t,f) + if not f then + f = { } + end + for k, v in next, t do + if type(v) == "table" then + flattened(v,f) + else + f[k] = v + end + end + return f +end + +table.flattened = flattened + -- the next three may disappear function table.remove_value(t,value) -- todo: n @@ -1165,7 +1363,7 @@ function table.clone(t,p) -- t is optional or nil or table elseif not t then t = { } end - setmetatable(t, { __index = function(_,key) return p[key] end }) + setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ? 
return t end @@ -1193,21 +1391,36 @@ function table.reverse(t) return tt end ---~ function table.keys(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return k ---~ end +function table.insert_before_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end + +function table.insert_after_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end ---~ function table.keys_as_string(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return concat(k,"") ---~ end end -- of closure @@ -1216,13 +1429,13 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-io'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local byte = string.byte +local byte, find, gsub = string.byte, string.find, string.gsub if string.find(os.getenv("PATH"),";") then io.fileseparator, io.pathseparator = "\\", ";" @@ -1251,7 +1464,7 @@ function io.savedata(filename,data,joiner) elseif type(data) == "function" then data(f) else - f:write(data) + f:write(data or "") end f:close() return true @@ -1380,20 +1593,21 @@ function io.ask(question,default,options) end io.write(string.format(" ")) local answer = io.read() - answer = answer:gsub("^%s*(.*)%s*$","%1") + answer = gsub(answer,"^%s*(.*)%s*$","%1") if answer == "" and default then return default elseif not options then return answer else - for _,v in pairs(options) do - if v == answer then + for k=1,#options do + if options[k] == answer then return answer end end local pattern = "^" .. 
answer - for _,v in pairs(options) do - if v:find(pattern) then + for k=1,#options do + local v = options[k] + if find(v,pattern) then return v end end @@ -1408,20 +1622,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-number'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format = string.format +local tostring = tostring +local format, floor, insert, match = string.format, math.floor, table.insert, string.match +local lpegmatch = lpeg.match number = number or { } -- a,b,c,d,e,f = number.toset(100101) function number.toset(n) - return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") + return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") end function number.toevenhex(n) @@ -1447,10 +1663,21 @@ end local one = lpeg.C(1-lpeg.S(''))^1 function number.toset(n) - return one:match(tostring(n)) + return lpegmatch(one,tostring(n)) end - +function number.bits(n,zero) + local t, i = { }, (zero and 0) or 1 + while n > 0 do + local m = n % 2 + if m > 0 then + insert(t,1,i) + end + n = floor(n/2) + i = i + 1 + end + return t +end end -- of closure @@ -1459,7 +1686,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-set'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1549,46 +1776,63 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-os'] = { version = 1.001, - comment = "companion to luat-lub.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local find = string.find +-- maybe build io.flush in os.execute + +local find, format, gsub = string.find, string.format, string.gsub +local random, ceil = math.random, math.ceil + +local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end function os.resultof(command) - return io.popen(command,"r"):read("*all") + ioflush() -- else messed up logging + local handle = io.popen(command,"r") + if not handle then + -- print("unknown command '".. command .. "' in os.resultof") + return "" + else + return handle:read("*all") or "" + end end -if not os.exec then os.exec = os.execute end -if not os.spawn then os.spawn = os.execute end - ---~ os.type : windows | unix (new, we already guessed os.platform) ---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +--~ os.type : windows | unix (new, we already guessed os.platform) +--~ os.name : windows | msdos | linux | macosx | solaris | .. 
| generic (new) +--~ os.platform : extended os.name with architecture if not io.fileseparator then if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows" + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" else - io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix" + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" end end -os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix" +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix = 'dll', 'exe' +else + os.libsuffix, os.binsuffix = 'so', '' +end function os.launch(str) - if os.platform == "windows" then + if os.type == "windows" then os.execute("start " .. str) -- os.spawn ? else os.execute(str .. " &") -- os.spawn ? end end -if not os.setenv then - function os.setenv() return false end -end - if not os.times then -- utime = user time -- stime = system time @@ -1618,64 +1862,218 @@ end --~ print(os.date("%H:%M:%S",os.gettimeofday())) --~ print(os.date("%H:%M:%S",os.time())) -os.arch = os.arch or function() - local a = os.resultof("uname -m") or "linux" - os.arch = function() - return a +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } + +local resolvers = os.resolvers + +local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil +local osix = osmt.__index + +osmt.__index = function(t,k) + return (resolvers[k] or osix)(t,k) +end + +setmetatable(os,osmt) + +if not os.setenv then + + -- we still store them but they won't be seen in + -- child processes although we might pass them some day + -- using command concatination + + local env, getenv = { }, os.getenv + + function os.setenv(k,v) + env[k] = v + end + + function os.getenv(k) + return env[k] or getenv(k) end - return a + end -local platform +-- we can use HOSTTYPE on some platforms -function os.currentplatform(name,default) - if not platform then - local name = os.name or os.platform or name -- os.name is built in, os.platform is mine - if not name then - platform = default or "linux" - elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then - if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then - platform = "mswin-64" - else - platform = "mswin" - end +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" else - local architecture = os.arch() - if name == "linux" then - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - elseif name 
== "macosx" then - if find(architecture,"i386") then - platform = "osx-intel" - else - platform = "osx-ppc" - end - elseif name == "sunos" then - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - elseif name == "freebsd" then - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - else - platform = default or name - end + platform = "mswin" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-64" + else + platform = "kfreebsd-i386" end - function os.currentplatform() - return platform + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + 
os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. +-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(os.date("%H")-os.date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) end + else + return 1 end - return platform end @@ -1685,7 +2083,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-file'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1696,14 +2094,17 @@ if not modules then modules = { } end modules ['l-file'] = { file = file or { } local concat = table.concat -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char +local lpegmatch = lpeg.match function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) end function file.addsuffix(filename, suffix) - if not find(filename,"%.[%a%d]+$") then + if not suffix or suffix == "" then + return filename + elseif not find(filename,"%.[%a%d]+$") then return filename .. "." .. suffix else return filename @@ -1726,20 +2127,39 @@ function file.nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$","")) end -function file.extname(name) - return match(name,"^.+%.([^/\\]-)$") or "" +function file.extname(name,default) + return match(name,"^.+%.([^/\\]-)$") or default or "" end file.suffix = file.extname ---~ print(file.join("x/","/y")) ---~ print(file.join("http://","/y")) ---~ print(file.join("http://a","/y")) ---~ print(file.join("http:///a","/y")) ---~ print(file.join("//nas-1","/y")) +--~ function file.join(...) +--~ local pth = concat({...},"/") +--~ pth = gsub(pth,"\\","/") +--~ local a, b = match(pth,"^(.*://)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ a, b = match(pth,"^(//)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ return (gsub(pth,"//+","/")) +--~ end + +local trick_1 = char(1) +local trick_2 = "^" .. trick_1 .. "/+" function file.join(...) - local pth = concat({...},"/") + local lst = { ... } + local a, b = lst[1], lst[2] + if a == "" then + lst[1] = trick_1 + elseif b and find(a,"^/+$") and find(b,"^/") then + lst[1] = "" + lst[2] = gsub(b,"^/+","") + end + local pth = concat(lst,"/") pth = gsub(pth,"\\","/") local a, b = match(pth,"^(.*://)(.*)$") if a and b then @@ -1749,17 +2169,28 @@ function file.join(...) 
if a and b then return a .. gsub(b,"//+","/") end + pth = gsub(pth,trick_2,"") return (gsub(pth,"//+","/")) end +--~ print(file.join("//","/y")) +--~ print(file.join("/","/y")) +--~ print(file.join("","/y")) +--~ print(file.join("/x/","/y")) +--~ print(file.join("x/","/y")) +--~ print(file.join("http://","/y")) +--~ print(file.join("http://a","/y")) +--~ print(file.join("http:///a","/y")) +--~ print(file.join("//nas-1","/y")) + function file.iswritable(name) local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,".")) - return a and a.permissions:sub(2,2) == "w" + return a and sub(a.permissions,2,2) == "w" end function file.isreadable(name) local a = lfs.attributes(name) - return a and a.permissions:sub(1,1) == "r" + return a and sub(a.permissions,1,1) == "r" end file.is_readable = file.isreadable @@ -1767,36 +2198,50 @@ file.is_writable = file.iswritable -- todo: lpeg -function file.split_path(str) - local t = { } - str = gsub(str,"\\", "/") - str = gsub(str,"(%a):([;/])", "%1\001%2") - for name in gmatch(str,"([^;:]+)") do - if name ~= "" then - t[#t+1] = gsub(name,"\001",":") - end - end - return t +--~ function file.split_path(str) +--~ local t = { } +--~ str = gsub(str,"\\", "/") +--~ str = gsub(str,"(%a):([;/])", "%1\001%2") +--~ for name in gmatch(str,"([^;:]+)") do +--~ if name ~= "" then +--~ t[#t+1] = gsub(name,"\001",":") +--~ end +--~ end +--~ return t +--~ end + +local checkedsplit = string.checkedsplit + +function file.split_path(str,separator) + str = gsub(str,"\\","/") + return checkedsplit(str,separator or io.pathseparator) end function file.join_path(tab) return concat(tab,io.pathseparator) -- can have trailing // end +-- we can hash them weakly + function file.collapse_path(str) - str = gsub(str,"/%./","/") - local n, m = 1, 1 - while n > 0 or m > 0 do - str, n = gsub(str,"[^/%.]+/%.%.$","") - str, m = gsub(str,"[^/%.]+/%.%./","") - end - str = gsub(str,"([^/])/$","%1") - str = gsub(str,"^%./","") - str = gsub(str,"/%.$","") + str = gsub(str,"\\","/") + if find(str,"/") then + str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified + str = gsub(str,"/%./","/") + local n, m = 1, 1 + while n > 0 or m > 0 do + str, n = gsub(str,"[^/%.]+/%.%.$","") + str, m = gsub(str,"[^/%.]+/%.%./","") + end + str = gsub(str,"([^/])/$","%1") + -- str = gsub(str,"^%./","") -- ./xx in qualified + str = gsub(str,"/%.$","") + end if str == "" then str = "." 
end return str end +--~ print(file.collapse_path("/a")) --~ print(file.collapse_path("a/./b/..")) --~ print(file.collapse_path("a/aa/../b/bb")) --~ print(file.collapse_path("a/../..")) @@ -1826,27 +2271,27 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1 --~ function file.extname(name) ---~ return pattern:match(name) or "" +--~ return lpegmatch(pattern,name) or "" --~ end --~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1) --~ function file.removesuffix(name) ---~ return pattern:match(name) +--~ return lpegmatch(pattern,name) --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1 --~ function file.basename(name) ---~ return pattern:match(name) or name +--~ return lpegmatch(pattern,name) or name --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1 --~ function file.dirname(name) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) +--~ return sub(name,1,p-2) --~ else --~ return "" --~ end @@ -1855,7 +2300,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.addsuffix(name, suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then --~ return name --~ else @@ -1866,9 +2311,9 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.replacesuffix(name,suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) .. "." .. suffix +--~ return sub(name,1,p-2) .. "." .. suffix --~ else --~ return name .. "." .. suffix --~ end @@ -1877,11 +2322,11 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1 --~ function file.nameonly(name) ---~ local a, b = pattern:match(name) +--~ local a, b = lpegmatch(pattern,name) --~ if b then ---~ return name:sub(a,b-2) +--~ return sub(name,a,b-2) --~ elseif a then ---~ return name:sub(a) +--~ return sub(name,a) --~ else --~ return name --~ end @@ -1915,11 +2360,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":") -- ./name ../name /name c: :// name/name function file.is_qualified_path(filename) - return qualified:match(filename) + return lpegmatch(qualified,filename) ~= nil end function file.is_rootbased_path(filename) - return rootbased:match(filename) + return lpegmatch(rootbased,filename) ~= nil end local slash = lpeg.S("\\/") @@ -1932,16 +2377,25 @@ local base = lpeg.C((1-suffix)^0) local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc("")) function file.splitname(str) -- returns drive, path, base, suffix - return pattern:match(str) + return lpegmatch(pattern,str) end --- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end -- -- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } -- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } -- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } -- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } +--~ -- todo: +--~ +--~ if os.type == "windows" then +--~ local currentdir = lfs.currentdir +--~ function lfs.currentdir() +--~ return 
(gsub(currentdir(),"\\","/")) +--~ end +--~ end + end -- of closure @@ -2006,7 +2460,7 @@ end function file.loadchecksum(name) if md5 then local data = io.loaddata(name .. ".md5") - return data and data:gsub("%s","") + return data and (gsub(data,"%s","")) end return nil end @@ -2025,19 +2479,168 @@ end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['l-dir'] = { +if not modules then modules = { } end modules ['l-url'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local char, gmatch, gsub = string.char, string.gmatch, string.gsub +local tonumber, type = tonumber, type +local lpegmatch = lpeg.match + +-- from the spec (on the web): +-- +-- foo://example.com:8042/over/there?name=ferret#nose +-- \_/ \______________/\_________/ \_________/ \__/ +-- | | | | | +-- scheme authority path query fragment +-- | _____________________|__ +-- / \ / \ +-- urn:example:animal:ferret:nose + +url = url or { } + +local function tochar(s) + return char(tonumber(s,16)) +end + +local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1) + +local hexdigit = lpeg.R("09","AF","af") +local plus = lpeg.P("+") +local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar) + +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) + +local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("") +local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("") +local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("") +local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("") +local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("") + +local parser = lpeg.Ct(scheme * authority * path * query * fragment) + +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + +function url.split(str) + return (type(str) == "string" and lpegmatch(parser,str)) or str +end + +-- todo: cache them + +function url.hashed(str) + local s = url.split(str) + local somescheme = s[1] ~= "" + return { + scheme = (somescheme and s[1]) or "file", + authority = s[2], + path = s[3], + query = s[4], + fragment = s[5], + original = str, + noscheme = not somescheme, + } +end + +function url.hasscheme(str) + return url.split(str)[1] ~= "" +end + +function url.addscheme(str,scheme) + return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str) +end + +function url.construct(hash) + local fullurl = hash.sheme .. "://".. hash.authority .. hash.path + if hash.query then + fullurl = fullurl .. "?".. hash.query + end + if hash.fragment then + fullurl = fullurl .. "?".. 
hash.fragment + end + return fullurl +end + +function url.filename(filename) + local t = url.hashed(filename) + return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename +end + +function url.query(str) + if type(str) == "string" then + local t = { } + for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do + t[k] = v + end + return t + else + return str + end +end + +--~ print(url.filename("file:///c:/oeps.txt")) +--~ print(url.filename("c:/oeps.txt")) +--~ print(url.filename("file:///oeps.txt")) +--~ print(url.filename("file:///etc/test.txt")) +--~ print(url.filename("/oeps.txt")) + +--~ from the spec on the web (sort of): +--~ +--~ function test(str) +--~ print(table.serialize(url.hashed(str))) +--~ end +--~ +--~ test("%56pass%20words") +--~ test("file:///c:/oeps.txt") +--~ test("file:///c|/oeps.txt") +--~ test("file:///etc/oeps.txt") +--~ test("file://./etc/oeps.txt") +--~ test("file:////etc/oeps.txt") +--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt") +--~ test("http://www.ietf.org/rfc/rfc2396.txt") +--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what") +--~ test("mailto:John.Doe@example.com") +--~ test("news:comp.infosystems.www.servers.unix") +--~ test("tel:+1-816-555-1212") +--~ test("telnet://192.0.2.16:80/") +--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2") +--~ test("/etc/passwords") +--~ test("http://www.pragma-ade.com/spaced%20name") + +--~ test("zip:///oeps/oeps.zip#bla/bla.tex") +--~ test("zip:///oeps/oeps.zip?bla/bla.tex") + + +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['l-dir'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- dir.expand_name will be merged with cleanpath and collapsepath + local type = type -local find, gmatch = string.find, string.gmatch +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local lpegmatch = lpeg.match dir = dir or { } +-- handy + +function dir.current() + return (gsub(lfs.currentdir(),"\\","/")) +end + -- optimizing for no string.find (*) does not save time local attributes = lfs.attributes @@ -2068,6 +2671,35 @@ end dir.glob_pattern = glob_pattern +local function collect_pattern(path,patt,recurse,result) + local ok, scanner + result = result or { } + if path == "/" then + ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner do + local full = path .. 
name + local attr = attributes(full) + local mode = attr.mode + if mode == 'file' then + if find(full,patt) then + result[name] = attr + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + attr.list = collect_pattern(full,patt,recurse) + result[name] = attr + end + end + end + return result +end + +dir.collect_pattern = collect_pattern + local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V local pattern = Ct { @@ -2087,29 +2719,48 @@ local filter = Cs ( ( )^0 ) local function glob(str,t) - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif lfs.isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,t) + end end - return t - elseif lfs.isfile(str) then - local t = t or { } - t[#t+1] = str - return t else - local split = pattern:match(str) - if split then + if type(str) == "table" then + local t = t or { } + for s=1,#str do + glob(str[s],t) + end + return t + elseif lfs.isfile(str) then local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = filter:match(start .. base) - glob_pattern(start,result,recurse,action) + t[#t+1] = str return t else - return { } + local split = lpegmatch(pattern,str) + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,action) + return t + else + return { } + end end end end @@ -2171,11 +2822,12 @@ end local make_indeed = true -- false -if string.find(os.getenv("PATH"),";") then +if string.find(os.getenv("PATH"),";") then -- os.type == "windows" function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2186,13 +2838,13 @@ if string.find(os.getenv("PATH"),";") then end local first, middle, last local drive = false - first, middle, last = str:match("^(//)(//*)(.*)$") + first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else - first, last = str:match("^(//)/*(.-)$") + first, last = match(str,"^(//)/*(.-)$") if first then - middle, last = str:match("([^/]+)/+(.-)$") + middle, last = match(str,"([^/]+)/+(.-)$") if middle then pth = "//" .. middle else @@ -2200,11 +2852,11 @@ if string.find(os.getenv("PATH"),";") then last = "" end else - first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$") + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") if first then pth, drive = first .. 
middle, true else - middle, last = str:match("^(/*)(.-)$") + middle, last = match(str,"^(/*)(.-)$") if not middle then last = str end @@ -2238,34 +2890,31 @@ if string.find(os.getenv("PATH"),";") then --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) - local first, nothing, last = str:match("^(//)(//*)(.*)$") + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") if first then - first = lfs.currentdir() .. "/" - first = first:gsub("\\","/") + first = dir.current() .. "/" end if not first then - first, last = str:match("^(//)/*(.*)$") + first, last = match(str,"^(//)/*(.*)$") end if not first then - first, last = str:match("^([a-zA-Z]:)(.*)$") + first, last = match(str,"^([a-zA-Z]:)(.*)$") if first and not find(last,"^/") then local d = lfs.currentdir() if lfs.chdir(first) then - first = lfs.currentdir() - first = first:gsub("\\","/") + first = dir.current() end lfs.chdir(d) end end if not first then - first, last = lfs.currentdir(), str - first = first:gsub("\\","/") + first, last = dir.current(), str end - last = last:gsub("//","/") - last = last:gsub("/%./","/") - last = last:gsub("^/*","") - first = first:gsub("/*$","") + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") if last == "" then return first else @@ -2276,8 +2925,9 @@ if string.find(os.getenv("PATH"),";") then else function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2286,7 +2936,7 @@ else end end end - str = str:gsub("/+","/") + str = gsub(str,"/+","/") if find(str,"^/") then pth = "/" for s in gmatch(str,"[^/]+") do @@ -2320,12 +2970,12 @@ else --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath if not find(str,"^/") then str = lfs.currentdir() .. "/" .. 
str end - str = str:gsub("//","/") - str = str:gsub("/%./","/") + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") return str end @@ -2340,7 +2990,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-boolean'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2401,7 +3051,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-math'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2448,7 +3098,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-utils'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2456,6 +3106,10 @@ if not modules then modules = { } end modules ['l-utils'] = { -- hm, quite unreadable +local gsub = string.gsub +local concat = table.concat +local type, next = type, next + if not utils then utils = { } end if not utils.merger then utils.merger = { } end if not utils.lua then utils.lua = { } end @@ -2493,7 +3147,7 @@ function utils.merger._self_load_(name) end if data and utils.merger.strip_comment then -- saves some 20K - data = data:gsub("%-%-~[^\n\r]*[\r\n]", "") + data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "") end return data or "" end @@ -2511,7 +3165,7 @@ end function utils.merger._self_swap_(data,code) if data ~= "" then - return (data:gsub(utils.merger.pattern, function(s) + return (gsub(data,utils.merger.pattern, function(s) return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n" end, 1)) else @@ -2521,8 +3175,8 @@ end --~ stripper: --~ ---~ data = string.gsub(data,"%-%-~[^\n]*\n","") ---~ data = string.gsub(data,"\n\n+","\n") +--~ data = gsub(data,"%-%-~[^\n]*\n","") +--~ data = gsub(data,"\n\n+","\n") function utils.merger._self_libs_(libs,list) local result, f, frozen = { }, nil, false @@ -2530,9 +3184,10 @@ function utils.merger._self_libs_(libs,list) if type(libs) == 'string' then libs = { libs } end if type(list) == 'string' then list = { list } end local foundpath = nil - for _, lib in ipairs(libs) do - for _, pth in ipairs(list) do - pth = string.gsub(pth,"\\","/") -- file.clean_path + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path utils.report("checking library path %s",pth) local name = pth .. "/" .. lib if lfs.isfile(name) then @@ -2544,7 +3199,8 @@ function utils.merger._self_libs_(libs,list) if foundpath then utils.report("using library path %s",foundpath) local right, wrong = { }, { } - for _, lib in ipairs(libs) do + for i=1,#libs do + local lib = libs[i] local fullname = foundpath .. "/" .. 
lib if lfs.isfile(fullname) then -- right[#right+1] = lib @@ -2558,15 +3214,15 @@ function utils.merger._self_libs_(libs,list) end end if #right > 0 then - utils.report("merged libraries: %s",table.concat(right," ")) + utils.report("merged libraries: %s",concat(right," ")) end if #wrong > 0 then - utils.report("skipped libraries: %s",table.concat(wrong," ")) + utils.report("skipped libraries: %s",concat(wrong," ")) end else utils.report("no valid library path found") end - return table.concat(result, "\n\n") + return concat(result, "\n\n") end function utils.merger.selfcreate(libs,list,target) @@ -2624,16 +3280,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. end + aux = aux or { } local concat, format, gmatch = table.concat, string.format, string.gmatch local tostring, type = tostring, type +local lpegmatch = lpeg.match + +local P, R, V = lpeg.P, lpeg.R, lpeg.V + +local escape, left, right = P("\\"), P('{'), P('}') + +lpeg.patterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} local space = lpeg.P(' ') local equal = lpeg.P("=") @@ -2641,7 +3309,7 @@ local comma = lpeg.P(",") local lbrace = lpeg.P("{") local rbrace = lpeg.P("}") local nobrace = 1 - (lbrace+rbrace) -local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace } +local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace } local spaces = space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) @@ -2679,13 +3347,13 @@ function aux.make_settings_to_hash_pattern(set,how) end end -function aux.settings_to_hash(str) +function aux.settings_to_hash(str,existing) if str and str ~= "" then - hash = { } + hash = existing or { } if moretolerant then - pattern_b_s:match(str) + lpegmatch(pattern_b_s,str) else - pattern_a_s:match(str) + lpegmatch(pattern_a_s,str) end return hash else @@ -2693,39 +3361,41 @@ function aux.settings_to_hash(str) end end -function aux.settings_to_hash_tolerant(str) +function aux.settings_to_hash_tolerant(str,existing) if str and str ~= "" then - hash = { } - pattern_b_s:match(str) + hash = existing or { } + lpegmatch(pattern_b_s,str) return hash else return { } end end -function aux.settings_to_hash_strict(str) +function aux.settings_to_hash_strict(str,existing) if str and str ~= "" then - hash = { } - pattern_c_s:match(str) + hash = existing or { } + lpegmatch(pattern_c_s,str) return next(hash) and hash else return nil end end -local seperator = comma * space^0 +local separator = comma * space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) -local pattern = lpeg.Ct(value*(seperator*value)^0) +local pattern = lpeg.Ct(value*(separator*value)^0) -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored aux.settings_to_array_pattern = pattern +-- we could use a weak table as cache + function aux.settings_to_array(str) if not str or str == "" then return { } else - return pattern:match(str) + return lpegmatch(pattern,str) end end @@ -2734,10 +3404,10 @@ local function set(t,v) end local value = lpeg.P(lpeg.Carg(1)*value) / set -local 
pattern = value*(seperator*value)^0 * lpeg.Carg(1) +local pattern = value*(separator*value)^0 * lpeg.Carg(1) function aux.add_settings_to_array(t,str) - return pattern:match(str, nil, t) + return lpegmatch(pattern,str,nil,t) end function aux.hash_to_string(h,separator,yes,no,strict,omit) @@ -2785,6 +3455,13 @@ function aux.settings_to_set(str,t) return t end +local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace +local pattern = lpeg.Ct((space + value)^0) + +function aux.arguments_to_table(str) + return lpegmatch(pattern,str) +end + -- temporary here function aux.getparameters(self,class,parentclass,settings) @@ -2793,36 +3470,31 @@ function aux.getparameters(self,class,parentclass,settings) sc = table.clone(self[parent]) self[class] = sc end - aux.add_settings_to_array(sc, settings) + aux.settings_to_hash(settings,sc) end -- temporary here -local digit = lpeg.R("09") -local period = lpeg.P(".") -local zero = lpeg.P("0") - ---~ local finish = lpeg.P(-1) ---~ local nodigit = (1-digit) + finish ---~ local case_1 = (period * zero^1 * #nodigit)/"" -- .000 ---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100 - +local digit = lpeg.R("09") +local period = lpeg.P(".") +local zero = lpeg.P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") - -local number = digit^1 * (case_1 + case_2) -local stripper = lpeg.Cs((number + 1)^0) +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = lpeg.Cs((number + 1)^0) --~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --~ collectgarbage("collect") --~ str = string.rep(sample,10000) --~ local ts = os.clock() ---~ stripper:match(str) ---~ print(#str, os.clock()-ts, stripper:match(sample)) +--~ lpegmatch(stripper,str) +--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +lpeg.patterns.strip_zeros = stripper function aux.strip_zeros(str) - return stripper:match(str) + return lpegmatch(stripper,str) end function aux.definetable(target) -- defines undefined tables @@ -2846,85 +3518,433 @@ function aux.accesstable(target) return t end +--~ function string.commaseparated(str) +--~ return gmatch(str,"([^,%s]+)") +--~ end + +-- as we use this a lot ... + +--~ function aux.cachefunction(action,weak) +--~ local cache = { } +--~ if weak then +--~ setmetatable(cache, { __mode = "kv" } ) +--~ end +--~ local function reminder(str) +--~ local found = cache[str] +--~ if not found then +--~ found = action(str) +--~ cache[str] = found +--~ end +--~ return found +--~ end +--~ return reminder, cache +--~ end + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['lxml-tab'] = { +if not modules then modules = { } end modules ['trac-tra'] = { version = 1.001, - comment = "this module is the basis for the lxml-* ones", + comment = "companion to trac-tra.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } ---[[ldx-- -

The parser used here is inspired by the variant discussed in the lua book, but -handles comments and processing instructions, has a different structure, provides -parent access; a first version used different trickery but was less optimized, so we -went this route. First we had a find based parser, now we have an lpeg based one. -The find based parser can be found in l-xml-edu.lua along with other older code.

- -

Especially the lpath code is experimental; we will support some of xpath, but -only things that make sense for us. As compensation it is possible to hook in your -own functions. Apart from preprocessing content for ConTeXt we also need -this module for process management, like handling ctx and -rlx files.

- - -a/b/c /*/c -a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) -a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) - - -
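A minimal usage sketch, not part of the patch: expressions like the ones above are meant to be fed to an lpath query helper; the helper name xml.filter is an assumption here, since only xml.convert appears in this hunk.

local root = xml.convert("<a><b>one</b><b>two</b></a>")
-- "a/b/text()" collects the text of the b elements, "a/b/first()" the first
-- match, "a/b/index(-1)" the last one, and so on
local found = xml.filter(root,"a/b/text()")
print(found)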

Beware, the interface may change. For instance at, ns, tg, dt may get more -verbose names. Once the code is stable we will also remove some tracing and -optimize the code.

---ldx]]-- +-- the tag is kind of generic and used for functions that are not +-- bound to a variable, like node.new, node.copy etc (contrary to for instance +-- node.has_attribute which is bound to a has_attribute local variable in mkiv) -xml = xml or { } +local debug = require "debug" ---~ local xml = xml +local getinfo = debug.getinfo +local type, next = type, next +local concat = table.concat +local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub -local concat, remove, insert = table.concat, table.remove, table.insert -local type, next, setmetatable = type, next, setmetatable -local format, lower, find = string.format, string.lower, string.find +debugger = debugger or { } ---[[ldx-- -

This module can be used stand alone but also inside MkIV, in -which case it hooks into the tracker code. Therefore we provide a few -functions that set the tracers.
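For illustration, a hedged sketch of how a module hooks a flag into that tracker mechanism; trackers.register and trackers.enable come from the setters code added elsewhere in this patch, and the key name used here is made up.

local trace_example = false

trackers.register("example.stuff", function(v) trace_example = v end)

-- registered keys are matched with string.find, so a pattern enables a group
trackers.enable("example.*")

if trace_example then
    logs.report("example","tracing enabled")
end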

---ldx]]-- +local counters = { } +local names = { } -local trace_remap = false +-- one -if trackers then - trackers.register("xml.remap", function(v) trace_remap = v end) +local function hook() + local f = getinfo(2,"f").func + local n = getinfo(2,"Sn") +-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end + if f then + local cf = counters[f] + if cf == nil then + counters[f] = 1 + names[f] = n + else + counters[f] = cf + 1 + end + end end - -function xml.settrace(str,value) - if str == "remap" then - trace_remap = value or false +local function getname(func) + local n = names[func] + if n then + if n.what == "C" then + return n.name or '' + else + -- source short_src linedefined what name namewhat nups func + local name = n.name or n.namewhat or n.what + if not name or name == "" then name = "?" end + return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name) + end + else + return "unknown" + end +end +function debugger.showstats(printer,threshold) + printer = printer or texio.write or print + threshold = threshold or 0 + local total, grandtotal, functions = 0, 0, 0 + printer("\n") -- ugly but ok + -- table.sort(counters) + for func, count in next, counters do + if count > threshold then + local name = getname(func) + if not find(name,"for generator") then + printer(format("%8i %s", count, name)) + total = total + count + end + end + grandtotal = grandtotal + count + functions = functions + 1 end + printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) end ---[[ldx-- -

First a hack to enable namespace resolving. A namespace is characterized by -a URL. The following function associates a namespace prefix with a -pattern. We use lpeg, which in this case is more than twice as fast as a -find based solution where we loop over an array of patterns. Less code and -much cleaner.

---ldx]]-- +-- two -xml.xmlns = xml.xmlns or { } +--~ local function hook() +--~ local n = getinfo(2) +--~ if n.what=="C" and not n.name then +--~ local f = tostring(debug.traceback()) +--~ local cf = counters[f] +--~ if cf == nil then +--~ counters[f] = 1 +--~ names[f] = n +--~ else +--~ counters[f] = cf + 1 +--~ end +--~ end +--~ end +--~ function debugger.showstats(printer,threshold) +--~ printer = printer or texio.write or print +--~ threshold = threshold or 0 +--~ local total, grandtotal, functions = 0, 0, 0 +--~ printer("\n") -- ugly but ok +--~ -- table.sort(counters) +--~ for func, count in next, counters do +--~ if count > threshold then +--~ printer(format("%8i %s", count, func)) +--~ total = total + count +--~ end +--~ grandtotal = grandtotal + count +--~ functions = functions + 1 +--~ end +--~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) +--~ end -local check = lpeg.P(false) -local parse = check +-- rest ---[[ldx-- -

The next function associates a namespace prefix with a URL. This -normally happens independent of parsing.

+function debugger.savestats(filename,threshold) + local f = io.open(filename,'w') + if f then + debugger.showstats(function(str) f:write(str) end,threshold) + f:close() + end +end + +function debugger.enable() + debug.sethook(hook,"c") +end + +function debugger.disable() + debug.sethook() +--~ counters[debug.getinfo(2,"f").func] = nil +end + +function debugger.tracing() + local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0 + if n > 0 then + function debugger.tracing() return true end ; return true + else + function debugger.tracing() return false end ; return false + end +end + +--~ debugger.enable() + +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) + +--~ debugger.disable() + +--~ print("") +--~ debugger.showstats() +--~ print("") +--~ debugger.showstats(print,3) + +setters = setters or { } +setters.data = setters.data or { } + +--~ local function set(t,what,value) +--~ local data, done = t.data, t.done +--~ if type(what) == "string" then +--~ what = aux.settings_to_array(what) -- inefficient but ok +--~ end +--~ for i=1,#what do +--~ local w = what[i] +--~ for d, f in next, data do +--~ if done[d] then +--~ -- prevent recursion due to wildcards +--~ elseif find(d,w) then +--~ done[d] = true +--~ for i=1,#f do +--~ f[i](value) +--~ end +--~ end +--~ end +--~ end +--~ end + +local function set(t,what,value) + local data, done = t.data, t.done + if type(what) == "string" then + what = aux.settings_to_hash(what) -- inefficient but ok + end + for w, v in next, what do + if v == "" then + v = value + else + v = toboolean(v) + end + for d, f in next, data do + if done[d] then + -- prevent recursion due to wildcards + elseif find(d,w) then + done[d] = true + for i=1,#f do + f[i](v) + end + end + end + end +end + +local function reset(t) + for d, f in next, t.data do + for i=1,#f do + f[i](false) + end + end +end + +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data + what = lower(what) + local w = data[what] + if not w then + w = { } + data[what] = w + end + for _, fnc in next, { ... 
} do + local typ = type(fnc) + if typ == "function" then + w[#w+1] = fnc + elseif typ == "string" then + w[#w+1] = function(value) set(t,fnc,value,nesting) end + end + end +end + +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,string.simpleesc(tostring(what))) + t.enable, t.done = e, { } +end + +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,string.simpleesc(tostring(what))) + t.disable, t.done = e, { } +end + +function setters.reset(t) + t.done = { } + reset(t) +end + +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) + local user, system = { }, { } + for l=1,#list do + local what = list[l] + if find(what,"^%*") then + system[#system+1] = what + else + user[#user+1] = what + end + end + return user, system +end + +function setters.show(t) + commands.writestatus("","") + local list = setters.list(t) + for k=1,#list do + commands.writestatus(t.name,list[k]) + end + commands.writestatus("","") +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module + +function setters.new(name) + local t + t = { + data = { }, + name = name, + enable = function(...) setters.enable (t,...) end, + disable = function(...) setters.disable (t,...) end, + register = function(...) setters.register(t,...) end, + list = function(...) setters.list (t,...) end, + show = function(...) setters.show (t,...) end, + } + setters.data[name] = t + return t +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +local e = directives.enable +local d = directives.disable + +function directives.enable(...) + commands.writestatus("directives","enabling: %s",concat({...}," ")) + e(...) +end + +function directives.disable(...) + commands.writestatus("directives","disabling: %s",concat({...}," ")) + d(...) +end + +local e = experiments.enable +local d = experiments.disable + +function experiments.enable(...) + commands.writestatus("experiments","enabling: %s",concat({...}," ")) + e(...) +end + +function experiments.disable(...) + commands.writestatus("experiments","disabling: %s",concat({...}," ")) + d(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + statistics.enable = not v +end) + + + +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['lxml-tab'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc +-- stripping spaces from e.g. 
cont-en.xml saves .2 sec runtime so it's not worth the +-- trouble + +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) + +--[[ldx-- +

The parser used here is inspired by the variant discussed in the lua book, but +handles comments and processing instructions, has a different structure, provides +parent access; a first version used different trickery but was less optimized, so we +went this route. First we had a find based parser, now we have an lpeg based one. +The find based parser can be found in l-xml-edu.lua along with other older code.
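As a quick illustration of the structure mentioned above (a sketch, not part of the patch): every element node built by this parser carries ns, tg, at and dt fields, as can be seen in add_empty and add_begin below.

local root = xml.convert([[<doc lang="en"><item>first</item></doc>]])
for i=1,#root.dt do
    local e = root.dt[i]
    if type(e) == "table" then
        -- tg is the tag, at the attribute table, dt the list of children
        print(e.tg, e.at.lang, #e.dt)
    end
end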

+ +

Beware, the interface may change. For instance at, ns, tg, dt may get more +verbose names. Once the code is stable we will also remove some tracing and +optimize the code.

+--ldx]]-- + +xml = xml or { } + +--~ local xml = xml + +local concat, remove, insert = table.concat, table.remove, table.insert +local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber +local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub +local utfchar = unicode.utf8.char +local lpegmatch = lpeg.match +local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs + +--[[ldx-- +

First a hack to enable namespace resolving. A namespace is characterized by +a URL. The following function associates a namespace prefix with a +pattern. We use lpeg, which in this case is more than twice as fast as a +find based solution where we loop over an array of patterns. Less code and +much cleaner.
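A short sketch of the registration and resolving helpers defined below (the URL is only illustrative):

xml.registerns("mml","mathml")                      -- any url containing "mathml" resolves to mml
xml.checkns("m","http://www.w3.org/mathml")         -- binds the document prefix m to mml
print(xml.resolvens("http://www.w3.org/mathml"))    -- prints mml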

+--ldx]]-- + +xml.xmlns = xml.xmlns or { } + +local check = P(false) +local parse = check + +--[[ldx-- +

The next function associates a namespace prefix with a URL. This +normally happens independent of parsing.
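The effect is visible in the xml.xmlns table that the parser consults; a small sketch, assuming the mathml pattern from the previous example has been registered:

xml.checkns("m","http://www.w3.org/1998/math/mathml")
print(xml.xmlns.m)  -- mml; checkns only stores the prefix when it differs from the resolved name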

xml.registerns("mml","mathml") @@ -2932,8 +3952,8 @@ xml.registerns("mml","mathml") --ldx]]-- function xml.registerns(namespace, pattern) -- pattern can be an lpeg - check = check + lpeg.C(lpeg.P(lower(pattern))) / namespace - parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) } + check = check + C(P(lower(pattern))) / namespace + parse = P { P(check) + 1 * V(1) } end --[[ldx-- @@ -2947,7 +3967,7 @@ xml.checkns("m","http://www.w3.org/mathml") --ldx]]-- function xml.checkns(namespace,url) - local ns = parse:match(lower(url)) + local ns = lpegmatch(parse,lower(url)) if ns and namespace ~= ns then xml.xmlns[namespace] = ns end @@ -2965,7 +3985,7 @@ This returns mml. --ldx]]-- function xml.resolvens(url) - return parse:match(lower(url)) or "" + return lpegmatch(parse,lower(url)) or "" end --[[ldx-- @@ -3004,27 +4024,36 @@ local x = xml.convert(somestring)

An optional second boolean argument tells this function not to create a root element.

---ldx]]-- -xml.strip_cm_and_dt = false -- an extra global flag, in case we have many includes +

Valid entities are:

+ + + + + + +--ldx]]-- -- not just one big nested table capture (lpeg overflow) local nsremap, resolvens = xml.xmlns, xml.resolvens -local stack, top, dt, at, xmlns, errorstr, entities = {}, {}, {}, {}, {}, nil, {} +local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { } +local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false +local dcache, hcache, acache = { }, { }, { } -local mt = { __tostring = xml.text } +local mt = { } -function xml.check_error(top,toclose) - return "" +function initialize_mt(root) + mt = { __index = root } -- will be redefined later end -local strip = false -local cleanup = false +function xml.setproperty(root,k,v) + getmetatable(root).__index[k] = v +end -function xml.set_text_cleanup(fnc) - cleanup = fnc +function xml.check_error(top,toclose) + return "" end local function add_attribute(namespace,tag,value) @@ -3034,12 +4063,31 @@ local function add_attribute(namespace,tag,value) if tag == "xmlns" then xmlns[#xmlns+1] = resolvens(value) at[tag] = value + elseif namespace == "" then + at[tag] = value elseif namespace == "xmlns" then xml.checkns(tag,value) at["xmlns:" .. tag] = value else - at[tag] = value + -- for the moment this way: + at[namespace .. ":" .. tag] = value + end +end + +local function add_empty(spacing, namespace, tag) + if #spacing > 0 then + dt[#dt+1] = spacing + end + local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace + top = stack[#stack] + dt = top.dt + local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } + dt[#dt+1] = t + setmetatable(t, mt) + if at.xmlns then + remove(xmlns) end + at = { } end local function add_begin(spacing, namespace, tag) @@ -3067,28 +4115,12 @@ local function add_end(spacing, namespace, tag) end dt = top.dt dt[#dt+1] = toclose - dt[0] = top + -- dt[0] = top -- nasty circular reference when serializing table if toclose.at.xmlns then remove(xmlns) end end -local function add_empty(spacing, namespace, tag) - if #spacing > 0 then - dt[#dt+1] = spacing - end - local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace - top = stack[#stack] - dt = top.dt - local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } - dt[#dt+1] = t - setmetatable(t, mt) - if at.xmlns then - remove(xmlns) - end - at = { } -end - local function add_text(text) if cleanup and #text > 0 then dt[#dt+1] = cleanup(text) @@ -3104,7 +4136,7 @@ local function add_special(what, spacing, text) if strip and (what == "@cm@" or what == "@dt@") then -- forget it else - dt[#dt+1] = { special=true, ns="", tg=what, dt={text} } + dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } } end end @@ -3112,42 +4144,260 @@ local function set_message(txt) errorstr = "garbage at the end of the file: " .. 
gsub(txt,"([ \n\r\t]*)","") end -local P, S, R, C, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V +local reported_attribute_errors = { } -local space = S(' \r\n\t') -local open = P('<') -local close = P('>') -local squote = S("'") -local dquote = S('"') -local equal = P('=') -local slash = P('/') -local colon = P(':') -local valid = R('az', 'AZ', '09') + S('_-.') -local name_yes = C(valid^1) * colon * C(valid^1) -local name_nop = C(P(true)) * C(valid^1) -local name = name_yes + name_nop +local function attribute_value_error(str) + if not reported_attribute_errors[str] then + logs.report("xml","invalid attribute value: %q",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end +local function attribute_specification_error(str) + if not reported_attribute_errors[str] then + logs.report("xml","invalid attribute specification: %q",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end -local utfbom = P('\000\000\254\255') + P('\255\254\000\000') + - P('\255\254') + P('\254\255') + P('\239\187\191') -- no capture +function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or format("&%s;",str) end +function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end +function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end -local spacing = C(space^0) -local justtext = C((1-open)^1) -local somespace = space^1 -local optionalspace = space^0 +local function fromhex(s) + local n = tonumber(s,16) + if n then + return utfchar(n) + else + return format("h:%s",s), true + end +end -local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -local attribute = (somespace * name * optionalspace * equal * optionalspace * value) / add_attribute -local attributes = attribute^0 +local function fromdec(s) + local n = tonumber(s) + if n then + return utfchar(n) + else + return format("d:%s",s), true + end +end -local text = justtext / add_text -local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example +-- one level expansion (simple case), no checking done -local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty -local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin -local endelement = (spacing * open * slash * name * optionalspace * close) / add_end +local rest = (1-P(";"))^0 +local many = P(1)^0 -local begincomment = open * P("!--") -local endcomment = P("--") * close +local parsedentity = + P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) + + (P("#x")*(many/fromhex) + P("#")*(many/fromdec)) + +-- parsing in the xml file + +local predefined_unified = { + [38] = "&", + [42] = """, + [47] = "'", + [74] = "<", + [76] = "&gr;", +} + +local predefined_simplified = { + [38] = "&", amp = "&", + [42] = '"', quot = '"', + [47] = "'", apos = "'", + [74] = "<", lt = "<", + [76] = ">", gt = ">", +} + +local function handle_hex_entity(str) + local h = hcache[str] + if not h then + local n = tonumber(str,16) + h = unify_predefined and predefined_unified[n] + if h then + if trace_entities then + logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h) + end + elseif utfize then + h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or "" + if not n then + logs.report("xml","utfize, ignoring hex entity &#x%s;",str) + elseif trace_entities then + logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h) + 
end + else + if trace_entities then + logs.report("xml","found entity &#x%s;",str) + end + h = "&#x" .. str .. ";" + end + hcache[str] = h + end + return h +end + +local function handle_dec_entity(str) + local d = dcache[str] + if not d then + local n = tonumber(str) + d = unify_predefined and predefined_unified[n] + if d then + if trace_entities then + logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d) + end + elseif utfize then + d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or "" + if not n then + logs.report("xml","utfize, ignoring dec entity &#%s;",str) + elseif trace_entities then + logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h) + end + else + if trace_entities then + logs.report("xml","found entity &#%s;",str) + end + d = "&#" .. str .. ";" + end + dcache[str] = d + end + return d +end + +xml.parsedentitylpeg = parsedentity + +local function handle_any_entity(str) + if resolve then + local a = acache[str] -- per instance ! todo + if not a then + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + elseif type(resolve) == "function" then + a = resolve(str) or entities[str] + else + a = entities[str] + end + if a then + if trace_entities then + logs.report("xml","resolved entity &%s; -> %s (internal)",str,a) + end + a = lpegmatch(parsedentity,a) or a + else + if xml.unknown_any_entity_format then + a = xml.unknown_any_entity_format(str) or "" + end + if a then + if trace_entities then + logs.report("xml","resolved entity &%s; -> %s (external)",str,a) + end + else + if trace_entities then + logs.report("xml","keeping entity &%s;",str) + end + if str == "" then + a = "&error;" + else + a = "&" .. str .. ";" + end + end + end + acache[str] = a + elseif trace_entities then + if not acache[str] then + logs.report("xml","converting entity &%s; into %s",str,a) + acache[str] = a + end + end + return a + else + local a = acache[str] + if not a then + if trace_entities then + logs.report("xml","found entity &%s;",str) + end + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + acache[str] = a + elseif str == "" then + a = "&error;" + acache[str] = a + else + a = "&" .. str .. 
";" + acache[str] = a + end + end + return a + end +end + +local function handle_end_entity(chr) + logs.report("xml","error in entity, %q found instead of ';'",chr) +end + +local space = S(' \r\n\t') +local open = P('<') +local close = P('>') +local squote = S("'") +local dquote = S('"') +local equal = P('=') +local slash = P('/') +local colon = P(':') +local semicolon = P(';') +local ampersand = P('&') +local valid = R('az', 'AZ', '09') + S('_-.') +local name_yes = C(valid^1) * colon * C(valid^1) +local name_nop = C(P(true)) * C(valid^1) +local name = name_yes + name_nop +local utfbom = lpeg.patterns.utfbom -- no capture +local spacing = C(space^0) + +----- entitycontent = (1-open-semicolon)^0 +local anyentitycontent = (1-open-semicolon-space-close)^0 +local hexentitycontent = R("AF","af","09")^0 +local decentitycontent = R("09")^0 +local parsedentity = P("#")/"" * ( + P("x")/"" * (hexentitycontent/handle_hex_entity) + + (decentitycontent/handle_dec_entity) + ) + (anyentitycontent/handle_any_entity) +local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity)) + +local text_unparsed = C((1-open)^1) +local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1) + +local somespace = space^1 +local optionalspace = space^0 + +----- value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value +local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value + +local endofattributes = slash * close + close -- recovery of flacky html +local whatever = space * name * optionalspace * equal +local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error +----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error +----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error +local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error + +local attributevalue = value + wrongvalue + +local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute +----- attributes = (attribute)^0 + +local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0 + +local parsedtext = text_parsed / add_text +local unparsedtext = text_unparsed / add_text +local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example + +local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty +local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin +local endelement = (spacing * open * slash * name * optionalspace * close) / add_end + +local begincomment = open * P("!--") +local endcomment = P("--") * close local begininstruction = open * P("?") local endinstruction = P("?") * close local begincdata = open * P("![CDATA[") @@ -3157,19 +4407,27 @@ local someinstruction = C((1 - endinstruction)^0) local somecomment = C((1 - endcomment )^0) local somecdata = C((1 - endcdata )^0) -local function entity(k,v) entities[k] = v end +local function normalentity(k,v ) entities[k] = v end +local function systementity(k,v,n) entities[k] = v end +local function publicentity(k,v,n) entities[k] = v end local begindoctype = open * P("!DOCTYPE") local enddoctype = close local beginset = P("[") local endset = P("]") -local doctypename = C((1-somespace)^0) +local doctypename 
= C((1-somespace-close)^0) local elementdoctype = optionalspace * P(" & + cleanup = settings.text_cleanup + stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { } + acache, hcache, dcache = { }, { }, { } -- not stored + reported_attribute_errors = { } + if settings.parent_root then + mt = getmetatable(settings.parent_root) + else + initialize_mt(top) + end stack[#stack+1] = top top.dt = { } dt = top.dt if not data or data == "" then errorstr = "empty xml file" - elseif not grammar:match(data) then - errorstr = "invalid xml file" + elseif utfize or resolve then + if lpegmatch(grammar_parsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - parsed text" + end + elseif type(data) == "string" then + if lpegmatch(grammar_unparsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - unparsed text" + end else - errorstr = "" + errorstr = "invalid xml file - no text at all" end if errorstr and errorstr ~= "" then - result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true } + result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } } setmetatable(stack, mt) - if xml.error_handler then xml.error_handler("load",errorstr) end + local error_handler = settings.error_handler + if error_handler == false then + -- no error message + else + error_handler = error_handler or xml.error_handler + if error_handler then + xml.error_handler("load",errorstr) + end + end else result = stack[1] end - if not no_root then - result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={}, entities = entities } + if not settings.no_root then + result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings } setmetatable(result, mt) local rdt = result.dt for k=1,#rdt do local v = rdt[k] if type(v) == "table" and not v.special then -- always table -) result.ri = k -- rootindex +v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this ! break end end end + if errorstr and errorstr ~= "" then + result.error = true + end return result end +xml.convert = xmlconvert + +function xml.inheritedconvert(data,xmldata) + local settings = xmldata.settings + settings.parent_root = xmldata -- to be tested + -- settings.no_root = true + local xc = xmlconvert(data,settings) + -- xc.settings = nil + -- xc.entities = nil + -- xc.special = nil + -- xc.ri = nil + -- print(xc.tg) + return xc +end + --[[ldx--
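(Editorial sketch, not part of the patch.) The rewritten converter above takes a settings table instead of the old boolean flag; no_root, entities and error_handler are visible in the code, while utfize, resolve and resolve_predefined are assumed here to be the keys behind the locals of the same name (their assignment falls in an elided hunk). A minimal call could look like this:

    local data  = "<doc att='value'>caf&#xE9; &amp; more</doc>"
    local xdata = xml.convert(data, {
        utfize             = true,            -- assumed key: expand numeric &#...; entities to utf
        resolve            = true,            -- assumed key: resolve named entities
        resolve_predefined = true,            -- assumed key: map &amp;, &lt;, ... onto plain characters
        entities           = { nbsp = " " },  -- extra named entities (settings.entities above)
        -- no_root         = true,            -- would skip the @rt@ wrapper element
        -- error_handler   = false,           -- would silence the "invalid xml file" report
    })
    if xdata.error then
        print(xdata.dt[1].dt[1])              -- the error string ends up in a nested error element
    end

The xml.load shown a bit further down accepts the same settings table and simply feeds the file content to this converter.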

Packaging data in an xml like table is done with the following function. Maybe it will go away (when not used).

@@ -3243,7 +4557,7 @@ function xml.is_valid(root) end function xml.package(tag,attributes,data) - local ns, tg = tag:match("^(.-):?([^:]+)$") + local ns, tg = match(tag,"^(.-):?([^:]+)$") local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} } setmetatable(t, mt) return t @@ -3261,21 +4575,19 @@ the whole file first. The function accepts a string representing a filename or a file handle.

--ldx]]-- -function xml.load(filename) +function xml.load(filename,settings) + local data = "" if type(filename) == "string" then + -- local data = io.loaddata(filename) - -todo: check type in io.loaddata local f = io.open(filename,'r') if f then - local root = xml.convert(f:read("*all")) + data = f:read("*all") f:close() - return root - else - return xml.convert("") end elseif filename then -- filehandle - return xml.convert(filename:read("*all")) - else - return xml.convert("") + data = filename:read("*all") end + return xmlconvert(data,settings) end --[[ldx-- @@ -3283,9 +4595,11 @@ end valid trees, which is what the next function does.

--ldx]]-- +local no_root = { no_root = true } + function xml.toxml(data) if type(data) == "string" then - local root = { xml.convert(data,true) } + local root = { xmlconvert(data,no_root) } return (#root > 1 and root) or root[1] else return data @@ -3305,7 +4619,7 @@ local function copy(old,tables) if not tables[old] then tables[old] = new end - for k,v in pairs(old) do + for k,v in next, old do new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v end local mt = getmetatable(old) @@ -3330,175 +4644,12 @@ alternative.

-- todo: add when not present -local fallbackhandle = (tex and tex.sprint) or io.write - -local function serialize(e, handle, textconverter, attributeconverter, specialconverter, nocommands) - if not e then - return - elseif not nocommands then - local ec = e.command - if ec ~= nil then -- we can have all kind of types - if e.special then - local etg, edt = e.tg, e.dt - local spc = specialconverter and specialconverter[etg] - if spc then - local result = spc(edt[1]) - if result then - handle(result) - return - else - -- no need to handle any further - end - end - end - local xc = xml.command - if xc then - xc(e,ec) - return - end - end - end - handle = handle or fallbackhandle - local etg = e.tg - if etg then - if e.special then - local edt = e.dt - local spc = specialconverter and specialconverter[etg] - if spc then - local result = spc(edt[1]) - if result then - handle(result) - else - -- no need to handle any further - end - elseif etg == "@pi@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@cm@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@cd@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@dt@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@rt@" then - serialize(edt,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - else - local ens, eat, edt, ern = e.ns, e.at, e.dt, e.rn - local ats = eat and next(eat) and { } -- type test maybe faster - if ats then - if attributeconverter then - for k,v in next, eat do - ats[#ats+1] = format('%s=%q',k,attributeconverter(v)) - end - else - for k,v in next, eat do - ats[#ats+1] = format('%s=%q',k,v) - end - end - end - if ern and trace_remap and ern ~= ens then - ens = ern - end - if ens ~= "" then - if edt and #edt > 0 then - if ats then - -- handle(format("<%s:%s %s>",ens,etg,concat(ats," "))) - handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. ">") - else - -- handle(format("<%s:%s>",ens,etg)) - handle("<" .. ens .. ":" .. etg .. ">") - end - for i=1,#edt do - local e = edt[i] - if type(e) == "string" then - if textconverter then - handle(textconverter(e)) - else - handle(e) - end - else - serialize(e,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - -- handle(format("",ens,etg)) - handle("") - else - if ats then - -- handle(format("<%s:%s %s/>",ens,etg,concat(ats," "))) - handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. "/>") - else - -- handle(format("<%s:%s/>",ens,etg)) - handle("<" .. ens .. ":" .. etg .. "/>") - end - end - else - if edt and #edt > 0 then - if ats then - -- handle(format("<%s %s>",etg,concat(ats," "))) - handle("<" .. etg .. " " .. concat(ats," ") .. ">") - else - -- handle(format("<%s>",etg)) - handle("<" .. etg .. ">") - end - for i=1,#edt do - local ei = edt[i] - if type(ei) == "string" then - if textconverter then - handle(textconverter(ei)) - else - handle(ei) - end - else - serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - -- handle(format("",etg)) - handle("") - else - if ats then - -- handle(format("<%s %s/>",etg,concat(ats," "))) - handle("<" .. etg .. " " .. concat(ats," ") .. "/>") - else - -- handle(format("<%s/>",etg)) - handle("<" .. etg .. 
"/>") - end - end - end - end - elseif type(e) == "string" then - if textconverter then - handle(textconverter(e)) - else - handle(e) - end - else - for i=1,#e do - local ei = e[i] - if type(ei) == "string" then - if textconverter then - handle(textconverter(ei)) - else - handle(ei) - end - else - serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - end -end - -xml.serialize = serialize - function xml.checkbom(root) -- can be made faster if root.ri then local dt, found = root.dt, false for k=1,#dt do local v = dt[k] - if type(v) == "table" and v.special and v.tg == "@pi" and find(v.dt,"xml.*version=") then + if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then found = true break end @@ -3515,111 +4666,356 @@ end and then handle the lot.

--ldx]]-- -function xml.tostring(root) -- 25% overhead due to collecting - if root then - if type(root) == 'string' then - return root - elseif next(root) then -- next is faster than type (and >0 test) - local result = { } - serialize(root,function(s) result[#result+1] = s end) -- brrr, slow (direct printing is faster) - return concat(result,"") +-- new experimental reorganized serialize + +local function verbose_element(e,handlers) + local handle = handlers.handle + local serialize = handlers.serialize + local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn + local ats = eat and next(eat) and { } + if ats then + for k,v in next, eat do + ats[#ats+1] = format('%s=%q',k,v) end end - return "" -end - ---[[ldx-- -

The next function operates on the content only and needs a handle function -that accepts a string.

---ldx]]-- - -function xml.string(e,handle) - if not handle or (e.special and e.tg ~= "@rt@") then - -- nothing - elseif e.tg then - local edt = e.dt - if edt then + if ern and trace_remap and ern ~= ens then + ens = ern + end + if ens ~= "" then + if edt and #edt > 0 then + if ats then + handle("<",ens,":",etg," ",concat(ats," "),">") + else + handle("<",ens,":",etg,">") + end for i=1,#edt do - xml.string(edt[i],handle) + local e = edt[i] + if type(e) == "string" then + handle(e) + else + serialize(e,handlers) + end + end + handle("") + else + if ats then + handle("<",ens,":",etg," ",concat(ats," "),"/>") + else + handle("<",ens,":",etg,"/>") end end else - handle(e) + if edt and #edt > 0 then + if ats then + handle("<",etg," ",concat(ats," "),">") + else + handle("<",etg,">") + end + for i=1,#edt do + local ei = edt[i] + if type(ei) == "string" then + handle(ei) + else + serialize(ei,handlers) + end + end + handle("") + else + if ats then + handle("<",etg," ",concat(ats," "),"/>") + else + handle("<",etg,"/>") + end + end end end ---[[ldx-- -

How you deal with saving data depends on your preferences. For a 40 MB database -file the timings on a 2.3 Core Duo are as follows (time in seconds):

- - -1.3 : load data from file to string -6.1 : convert string into tree -5.3 : saving in file using xmlsave -6.8 : converting to string using xml.tostring -3.6 : saving converted string in file - +local function verbose_pi(e,handlers) + handlers.handle("") +end -

The save function is given below.

---ldx]]-- +local function verbose_comment(e,handlers) + handlers.handle("") +end -function xml.save(root,name) - local f = io.open(name,"w") - if f then - xml.serialize(root,function(s) f:write(s) end) - f:close() - end +local function verbose_cdata(e,handlers) + handlers.handle("") end ---[[ldx-- -

A few helpers:

---ldx]]-- +local function verbose_doctype(e,handlers) + handlers.handle("") +end -function xml.body(root) - return (root.ri and root.dt[root.ri]) or root +local function verbose_root(e,handlers) + handlers.serialize(e.dt,handlers) end -function xml.text(root) - return (root and xml.tostring(root)) or "" +local function verbose_text(e,handlers) + handlers.handle(e) end -function xml.content(root) -- bugged - return (root and root.dt and xml.tostring(root.dt)) or "" +local function verbose_document(e,handlers) + local serialize = handlers.serialize + local functions = handlers.functions + for i=1,#e do + local ei = e[i] + if type(ei) == "string" then + functions["@tx@"](ei,handlers) + else + serialize(ei,handlers) + end + end end -function xml.isempty(root, pattern) - if pattern == "" or pattern == "*" then - pattern = nil +local function serialize(e,handlers,...) + local initialize = handlers.initialize + local finalize = handlers.finalize + local functions = handlers.functions + if initialize then + local state = initialize(...) + if not state == true then + return state + end end - if pattern then - -- todo - return false + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) else - return not root or not root.dt or #root.dt == 0 or root.dt == "" + functions["@dc@"](e,handlers) + end + if finalize then + return finalize() end end ---[[ldx-- -

The next helper erases an element but keeps the table as it is, -and since empty strings are not serialized (effectively) it does -no harm. Copying the table would take more time. Usage:

- - -dt[k] = xml.empty() or xml.empty(dt,k) - ---ldx]]-- - -function xml.empty(dt,k) - if dt and k then - dt[k] = "" - return dt[k] +local function xserialize(e,handlers) + local functions = handlers.functions + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) else - return "" + functions["@dc@"](e,handlers) end end ---[[ldx-- -

The next helper assigns a tree (or string). Usage:

+local handlers = { } + +local function newhandlers(settings) + local t = table.copy(handlers.verbose or { }) -- merge + if settings then + for k,v in next, settings do + if type(v) == "table" then + tk = t[k] if not tk then tk = { } t[k] = tk end + for kk,vv in next, v do + tk[kk] = vv + end + else + t[k] = v + end + end + if settings.name then + handlers[settings.name] = t + end + end + return t +end + +local nofunction = function() end + +function xml.sethandlersfunction(handler,name,fnc) + handler.functions[name] = fnc or nofunction +end + +function xml.gethandlersfunction(handler,name) + return handler.functions[name] +end + +function xml.gethandlers(name) + return handlers[name] +end + +newhandlers { + name = "verbose", + initialize = false, -- faster than nil and mt lookup + finalize = false, -- faster than nil and mt lookup + serialize = xserialize, + handle = print, + functions = { + ["@dc@"] = verbose_document, + ["@dt@"] = verbose_doctype, + ["@rt@"] = verbose_root, + ["@el@"] = verbose_element, + ["@pi@"] = verbose_pi, + ["@cm@"] = verbose_comment, + ["@cd@"] = verbose_cdata, + ["@tx@"] = verbose_text, + } +} + +--[[ldx-- +
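(Editorial sketch, not part of the patch.) newhandlers above copies the verbose handler set and merges whatever you pass in, so a custom serializer only needs to override what differs; the collector below and the variable somexml are placeholders, and xml.newhandlers / xml.serialize are the exported names assigned a little further down:

    local buffer    = { }
    local collector = xml.newhandlers {
        name   = "collect",
        handle = function(...) buffer[#buffer+1] = table.concat { ... } end,
    }
    xml.sethandlersfunction(collector, "@cm@", function() end)   -- drop comments while serializing
    xml.serialize(somexml, collector)
    print(table.concat(buffer))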

How you deal with saving data depends on your preferences. For a 40 MB database +file the timings on a 2.3 Core Duo are as follows (time in seconds):

+
+
+1.3 : load data from file to string
+6.1 : convert string into tree
+5.3 : saving in file using xmlsave
+6.8 : converting to string using xml.tostring
+3.6 : saving converted string in file
+
+
+

Beware, these were timings with the old routine, but measurements will not be that +much different I guess.

+--ldx]]-- + +-- maybe this will move to lxml-xml + +local result + +local xmlfilehandler = newhandlers { + name = "file", + initialize = function(name) result = io.open(name,"wb") return result end, + finalize = function() result:close() return true end, + handle = function(...) result:write(...) end, +} + +-- no checking on writeability here but not faster either +-- +-- local xmlfilehandler = newhandlers { +-- initialize = function(name) io.output(name,"wb") return true end, +-- finalize = function() io.close() return true end, +-- handle = io.write, +-- } + + +function xml.save(root,name) + serialize(root,xmlfilehandler,name) +end + +local result + +local xmlstringhandler = newhandlers { + name = "string", + initialize = function() result = { } return result end, + finalize = function() return concat(result) end, + handle = function(...) result[#result+1] = concat { ... } end +} + +local function xmltostring(root) -- 25% overhead due to collecting + if root then + if type(root) == 'string' then + return root + else -- if next(root) then -- next is faster than type (and >0 test) + return serialize(root,xmlstringhandler) or "" + end + end + return "" +end + +local function xmltext(root) -- inline + return (root and xmltostring(root)) or "" +end + +function initialize_mt(root) + mt = { __tostring = xmltext, __index = root } +end + +xml.defaulthandlers = handlers +xml.newhandlers = newhandlers +xml.serialize = serialize +xml.tostring = xmltostring + +--[[ldx-- +
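(Editorial sketch, not part of the patch.) With the file and string handlers registered, saving and stringifying are just two front ends to the same serializer:

    local root = xml.convert("<a><b>whatever</b></a>", { })
    xml.save(root, "whatever.xml")       -- streams the tree through the "file" handler
    local s = xml.tostring(root)         -- collects it through the "string" handler

Since initialize_mt now wires __tostring to the same routine, plain tostring(root) should yield the same text.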

The next function operates on the content only and needs a handle function +that accepts a string.

+--ldx]]-- + +local function xmlstring(e,handle) + if not handle or (e.special and e.tg ~= "@rt@") then + -- nothing + elseif e.tg then + local edt = e.dt + if edt then + for i=1,#edt do + xmlstring(edt[i],handle) + end + end + else + handle(e) + end +end + +xml.string = xmlstring + +--[[ldx-- +
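(Editorial sketch, not part of the patch.) xml.string above feeds every raw text fragment of an element to the given handle; a collector is the typical use (somexml is a placeholder for a converted tree):

    local parts = { }
    xml.string(somexml, function(s) parts[#parts+1] = s end)
    print(table.concat(parts, " "))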

A few helpers:

+--ldx]]-- + +--~ xmlsetproperty(root,"settings",settings) + +function xml.settings(e) + while e do + local s = e.settings + if s then + return s + else + e = e.__p__ + end + end + return nil +end + +function xml.root(e) + local r = e + while e do + e = e.__p__ + if e then + r = e + end + end + return r +end + +function xml.parent(root) + return root.__p__ +end + +function xml.body(root) + return (root.ri and root.dt[root.ri]) or root -- not ok yet +end + +function xml.name(root) + if not root then + return "" + elseif root.ns == "" then + return root.tg + else + return root.ns .. ":" .. root.tg + end +end + +--[[ldx-- +
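(Editorial sketch, not part of the patch.) These helpers lean on the __p__ parent links that the parser now stores in every element; someelement is a placeholder:

    local e = someelement
    while e do
        print(xml.name(e))               -- "ns:tag" or just "tag"
        e = xml.parent(e)                -- nil once we pass the root
    end
    local body = xml.body(xml.root(someelement))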

The next helper erases an element but keeps the table as it is, +and since empty strings are not serialized (effectively) it does +no harm. Copying the table would take more time. Usage:

+--ldx]]-- + +function xml.erase(dt,k) + if dt then + if k then + dt[k] = "" + else for k=1,#dt do + dt[1] = { "" } + end end + end +end + +--[[ldx-- +
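(Editorial sketch, not part of the patch.) Usage of the erase helper, with dt and k standing for a data table and an index obtained elsewhere:

    xml.erase(dt, k)    -- blank entry k but keep the table (and the indices) intact
    xml.erase(dt)       -- meant to blank every entry; note that the loop above rewrites dt[1] each pass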

The next helper assigns a tree (or string). Usage:

dt[k] = xml.assign(root) or xml.assign(dt,k,root) @@ -3635,6 +5031,42 @@ function xml.assign(dt,k,root) end end +-- the following helpers may move + +--[[ldx-- +

The next helper assigns a tree (or string). Usage:

+ +xml.tocdata(e) +xml.tocdata(e,"error") + +--ldx]]-- + +function xml.tocdata(e,wrapper) + local whatever = xmltostring(e.dt) + if wrapper then + whatever = format("<%s>%s",wrapper,whatever,wrapper) + end + local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e } + setmetatable(t,getmetatable(e)) + e.dt = { t } +end + +function xml.makestandalone(root) + if root.ri then + local dt = root.dt + for k=1,#dt do + local v = dt[k] + if type(v) == "table" and v.special and v.tg == "@pi@" then + local txt = v.dt[1] + if find(txt,"xml.*version=") then + v.dt[1] = txt .. " standalone='yes'" + break + end + end + end + end +end + end -- of closure @@ -3648,1420 +5080,1285 @@ if not modules then modules = { } end modules ['lxml-pth'] = { license = "see context related readme files" } +-- e.ni is only valid after a filter run + local concat, remove, insert = table.concat, table.remove, table.insert local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, lower, gmatch, gsub, find = string.format, string.lower, string.gmatch, string.gsub, string.find +local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep +local lpegmatch = lpeg.match + +-- beware, this is not xpath ... e.g. position is different (currently) and +-- we have reverse-sibling as reversed preceding sibling --[[ldx--
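(Editorial sketch, not part of the patch.) The cdata and standalone helpers defined just above in use; e and root are placeholders for an element and a converted document:

    xml.tocdata(e)               -- replace the content of e by a single @cd@ (cdata) child
    xml.tocdata(e, "error")      -- idem, but first wrap the serialized content in an error element
    xml.makestandalone(root)     -- append standalone='yes' to the xml declaration, if one is present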

This module can be used stand alone but also inside in which case it hooks into the tracker code. Therefore we provide a few functions that set the tracers. Here we overload a previously defined function.

+

If I can get in the mood I will make a variant that is XSLT compliant +but I wonder if it makes sense.

--ldx]]-- -local trace_lpath = false - -if trackers then - trackers.register("xml.lpath", function(v) trace_lpath = v end) -end +--[[ldx-- +

Especially the lpath code is experimental; we will support some of xpath, but +only things that make sense for us; as compensation it is possible to hook in your +own functions. Apart from preprocessing content for we also need +this module for process management, like handling and +files.

-local settrace = xml.settrace -- lxml-tab + +a/b/c /*/c +a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) +a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) + +--ldx]]-- -function xml.settrace(str,value) - if str == "lpath" then - trace_lpath = value or false - else - settrace(str,value) -- lxml-tab - end -end +local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end +local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end +local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end --[[ldx-- -

We've now arrived at an intersting part: accessing the tree using a subset +

We've now arrived at an interesting part: accessing the tree using a subset of and since we're not compatible we call it . We will explain more about its usage in other documents.

--ldx]]-- -local lpathcalls = 0 -- statistics -local lpathcached = 0 -- statistics +local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end +local lpathcached = 0 function xml.lpathcached() return lpathcached end -xml.functions = xml.functions or { } -xml.expressions = xml.expressions or { } +xml.functions = xml.functions or { } -- internal +xml.expressions = xml.expressions or { } -- in expressions +xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection) +xml.specialhandler = xml.specialhandler or { } local functions = xml.functions local expressions = xml.expressions +local finalizers = xml.finalizers -local actions = { - [10] = "stay", - [11] = "parent", - [12] = "subtree root", - [13] = "document root", - [14] = "any", - [15] = "many", - [16] = "initial", - [20] = "match", - [21] = "match one of", - [22] = "match and attribute eq", - [23] = "match and attribute ne", - [24] = "match one of and attribute eq", - [25] = "match one of and attribute ne", - [27] = "has attribute", - [28] = "has value", - [29] = "fast match", - [30] = "select", - [31] = "expression", - [40] = "processing instruction", -} - --- a rather dumb lpeg +finalizers.xml = finalizers.xml or { } +finalizers.tex = finalizers.tex or { } -local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc +local function fallback (t, name) + local fn = finalizers[name] + if fn then + t[name] = fn + else + logs.report("xml","unknown sub finalizer '%s'",tostring(name)) + fn = function() end + end + return fn +end --- instead of using functions we just parse a few names which saves a call --- later on +setmetatable(finalizers.xml, { __index = fallback }) +setmetatable(finalizers.tex, { __index = fallback }) -local lp_position = P("position()") / "ps" -local lp_index = P("index()") / "id" -local lp_text = P("text()") / "tx" -local lp_name = P("name()") / "(ns~='' and ns..':'..tg)" -- "((rt.ns~='' and rt.ns..':'..rt.tg) or '')" -local lp_tag = P("tag()") / "tg" -- (rt.tg or '') -local lp_ns = P("ns()") / "ns" -- (rt.ns or '') -local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") -local lp_doequal = P("=") / "==" -local lp_attribute = P("@") / "" * Cc("(at['") * R("az","AZ","--","__")^1 * Cc("'] or '')") +xml.defaultprotocol = "xml" -local lp_lua_function = C(R("az","AZ","--","__")^1 * (P(".") * R("az","AZ","--","__")^1)^1) * P("(") / function(t) -- todo: better . handling - return t .. "(" -end +-- as xsl does not follow xpath completely here we will also +-- be more liberal especially with regards to the use of | and +-- the rootpath: +-- +-- test : all 'test' under current +-- /test : 'test' relative to current +-- a|b|c : set of names +-- (a|b|c) : idem +-- ! : not +-- +-- after all, we're not doing transformations but filtering. in +-- addition we provide filter functions (last bit) +-- +-- todo: optimizer +-- +-- .. : parent +-- * : all kids +-- / : anchor here +-- // : /**/ +-- ** : all in between +-- +-- so far we had (more practical as we don't transform) +-- +-- {/test} : kids 'test' under current node +-- {test} : any kid with tag 'test' +-- {//test} : same as above -local lp_function = C(R("az","AZ","--","__")^1) * P("(") / function(t) -- todo: better . handling - if expressions[t] then - return "expressions." .. t .. 
"(" - else - return "expressions.error(" - end -end +-- evaluator (needs to be redone, for the moment copied) -local lparent = lpeg.P("(") -local rparent = lpeg.P(")") -local noparent = 1 - (lparent+rparent) -local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent} -local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"} +-- todo: apply_axis(list,notable) and collection vs single --- if we use a dedicated namespace then we don't need to pass rt and k +local apply_axis = { } -local lp_special = (C(P("name")+P("text")+P("tag"))) * value / function(t,s) - if expressions[t] then - if s then - return "expressions." .. t .. "(r,k," .. s ..")" - else - return "expressions." .. t .. "(r,k)" +apply_axis['root'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local rt = ll + while ll do + ll = ll.__p__ + if ll then + rt = ll + end end - else - return "expressions.error(" .. t .. ")" + collected[#collected+1] = rt end + return collected end -local converter = lpeg.Cs ( ( - lp_position + - lp_index + - lp_text + lp_name + -- fast one - lp_special + - lp_noequal + lp_doequal + - lp_attribute + - lp_lua_function + - lp_function + -1 )^1 ) - --- expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1 +apply_axis['self'] = function(list) +--~ local collected = { } +--~ for l=1,#list do +--~ collected[#collected+1] = list[l] +--~ end +--~ return collected + return list +end -local template = [[ - return function(expressions,r,d,k,e,dt,ns,tg,id,ps) - local at, tx = e.at or { }, dt[1] or "" - return %s +apply_axis['child'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local dt = ll.dt + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + end + end + ll.en = en end -]] - -local function make_expression(str) - str = converter:match(str) - return str, loadstring(format(template,str))() -end - -local map = { } - -local space = S(' \r\n\t') -local squote = S("'") -local dquote = S('"') -local lparent = P('(') -local rparent = P(')') -local atsign = P('@') -local lbracket = P('[') -local rbracket = P(']') -local exclam = P('!') -local period = P('.') -local eq = P('==') + P('=') -local ne = P('<>') + P('!=') -local star = P('*') -local slash = P('/') -local colon = P(':') -local bar = P('|') -local hat = P('^') -local valid = R('az', 'AZ', '09') + S('_-') -local name_yes = C(valid^1 + star) * colon * C(valid^1 + star) -- permits ns:* *:tg *:* -local name_nop = Cc("*") * C(valid^1) -local name = name_yes + name_nop -local number = C((S('+-')^0 * R('09')^1)) / tonumber -local names = (bar^0 * name)^1 -local morenames = name * (bar^0 * name)^1 -local instructiontag = P('pi::') -local spacing = C(space^0) -local somespace = space^1 -local optionalspace = space^0 -local text = C(valid^0) -local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -local empty = 1-slash - -local is_eq = lbracket * atsign * name * eq * value * rbracket -local is_ne = lbracket * atsign * name * ne * value * rbracket -local is_attribute = lbracket * atsign * name * rbracket -local is_value = lbracket * value * rbracket -local is_number = lbracket * number * rbracket - -local nobracket = 1-(lbracket+rbracket) -- must be improved -local is_expression = lbracket * C(((C(nobracket^1))/make_expression)) * rbracket - -local is_expression = lbracket * 
(C(nobracket^1))/make_expression * rbracket - -local is_one = name -local is_none = exclam * name -local is_one_of = ((lparent * names * rparent) + morenames) -local is_none_of = exclam * ((lparent * names * rparent) + morenames) - -local stay = (period ) -local parent = (period * period ) / function( ) map[#map+1] = { 11 } end -local subtreeroot = (slash + hat ) / function( ) map[#map+1] = { 12 } end -local documentroot = (hat * hat ) / function( ) map[#map+1] = { 13 } end -local any = (star ) / function( ) map[#map+1] = { 14 } end -local many = (star * star ) / function( ) map[#map+1] = { 15 } end -local initial = (hat * hat * hat ) / function( ) map[#map+1] = { 16 } end - -local match = (is_one ) / function(...) map[#map+1] = { 20, true , ... } end -local match_one_of = (is_one_of ) / function(...) map[#map+1] = { 21, true , ... } end -local dont_match = (is_none ) / function(...) map[#map+1] = { 20, false, ... } end -local dont_match_one_of = (is_none_of ) / function(...) map[#map+1] = { 21, false, ... } end - -local match_and_eq = (is_one * is_eq ) / function(...) map[#map+1] = { 22, true , ... } end -local match_and_ne = (is_one * is_ne ) / function(...) map[#map+1] = { 23, true , ... } end -local dont_match_and_eq = (is_none * is_eq ) / function(...) map[#map+1] = { 22, false, ... } end -local dont_match_and_ne = (is_none * is_ne ) / function(...) map[#map+1] = { 23, false, ... } end - -local match_one_of_and_eq = (is_one_of * is_eq ) / function(...) map[#map+1] = { 24, true , ... } end -local match_one_of_and_ne = (is_one_of * is_ne ) / function(...) map[#map+1] = { 25, true , ... } end -local dont_match_one_of_and_eq = (is_none_of * is_eq ) / function(...) map[#map+1] = { 24, false, ... } end -local dont_match_one_of_and_ne = (is_none_of * is_ne ) / function(...) map[#map+1] = { 25, false, ... } end - -local has_attribute = (is_one * is_attribute) / function(...) map[#map+1] = { 27, true , ... } end -local has_value = (is_one * is_value ) / function(...) map[#map+1] = { 28, true , ... } end -local dont_has_attribute = (is_none * is_attribute) / function(...) map[#map+1] = { 27, false, ... } end -local dont_has_value = (is_none * is_value ) / function(...) map[#map+1] = { 28, false, ... } end -local position = (is_one * is_number ) / function(...) map[#map+1] = { 30, true, ... } end -local dont_position = (is_none * is_number ) / function(...) map[#map+1] = { 30, false, ... } end - -local expression = (is_one * is_expression)/ function(...) map[#map+1] = { 31, true, ... } end -local dont_expression = (is_none * is_expression)/ function(...) map[#map+1] = { 31, false, ... } end - -local self_expression = ( is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end - map[#map+1] = { 31, true, "*", "*", ... } end -local dont_self_expression = (exclam * is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end - map[#map+1] = { 31, false, "*", "*", ... } end - -local instruction = (instructiontag * text ) / function(...) map[#map+1] = { 40, ... } end -local nothing = (empty ) / function( ) map[#map+1] = { 15 } end -- 15 ? 
-local crap = (1-slash)^1 - --- a few ugly goodies: - -local docroottag = P('^^') / function( ) map[#map+1] = { 12 } end -local subroottag = P('^') / function( ) map[#map+1] = { 13 } end -local roottag = P('root::') / function( ) map[#map+1] = { 12 } end -local parenttag = P('parent::') / function( ) map[#map+1] = { 11 } end -local childtag = P('child::') -local selftag = P('self::') - --- there will be more and order will be optimized - -local selector = ( - instruction + --- many + any + -- brrr, not here ! - parent + stay + - dont_position + position + - dont_match_one_of_and_eq + dont_match_one_of_and_ne + - match_one_of_and_eq + match_one_of_and_ne + - dont_match_and_eq + dont_match_and_ne + - match_and_eq + match_and_ne + - dont_expression + expression + - dont_self_expression + self_expression + - has_attribute + has_value + - dont_match_one_of + match_one_of + - dont_match + match + - many + any + - crap + empty -) - -local grammar = P { "startup", - startup = (initial + documentroot + subtreeroot + roottag + docroottag + subroottag)^0 * V("followup"), - followup = ((slash + parenttag + childtag + selftag)^0 * selector)^1, -} + return collected +end -local function compose(str) - if not str or str == "" then - -- wildcard - return true - elseif str == '/' then - -- root - return false - else - map = { } - grammar:match(str) - if #map == 0 then - return true - else - local m = map[1][1] - if #map == 1 then - if m == 14 or m == 15 then - -- wildcard - return true - elseif m == 12 then - -- root - return false - end - elseif #map == 2 and m == 12 and map[2][1] == 20 then - -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } } - map[2][1] = 29 - return { map[2] } - end - if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then - insert(map, 1, { 16 }) +local function collect(list,collected) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + collect(dk,collected) end - -- print(gsub(table.serialize(map),"[ \n]+"," ")) - return map end + list.en = en end end +apply_axis['descendant'] = function(list) + local collected = { } + for l=1,#list do + collect(list[l],collected) + end + return collected +end -local cache = { } - -function xml.lpath(pattern,trace) - lpathcalls = lpathcalls + 1 - if type(pattern) == "string" then - local result = cache[pattern] - if result == nil then -- can be false which is valid -) - result = compose(pattern) - cache[pattern] = result - lpathcached = lpathcached + 1 +local function collect(list,collected) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + collect(dk,collected) + end end - if trace or trace_lpath then - xml.lshow(result) + list.en = en + end +end +apply_axis['descendant-or-self'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + if ll.special ~= true then -- catch double root + collected[#collected+1] = ll end - return result - else - return pattern + collect(ll,collected) end + return collected end -function xml.cached_patterns() - return cache +apply_axis['ancestor'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + while ll do + ll = ll.__p__ + if ll then + collected[#collected+1] = ll + end + end + end + return collected end --- we run out of locals (limited to 200) --- --- local 
fallbackreport = (texio and texio.write) or io.write - -function xml.lshow(pattern,report) --- report = report or fallbackreport - report = report or (texio and texio.write) or io.write - local lp = xml.lpath(pattern) - if lp == false then - report(" -: root\n") - elseif lp == true then - report(" -: wildcard\n") - else - if type(pattern) == "string" then - report(format("pattern: %s\n",pattern)) - end - for k=1,#lp do - local v = lp[k] - if #v > 1 then - local t = { } - for i=2,#v do - local vv = v[i] - if type(vv) == "string" then - t[#t+1] = (vv ~= "" and vv) or "#" - elseif type(vv) == "boolean" then - t[#t+1] = (vv and "==") or "<>" - end - end - report(format("%2i: %s %s -> %s\n", k,v[1],actions[v[1]],concat(t," "))) - else - report(format("%2i: %s %s\n", k,v[1],actions[v[1]])) +apply_axis['ancestor-or-self'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + collected[#collected+1] = ll + while ll do + ll = ll.__p__ + if ll then + collected[#collected+1] = ll end end end + return collected end -function xml.xshow(e,...) -- also handy when report is given, use () to isolate first e - local t = { ... } --- local report = (type(t[#t]) == "function" and t[#t]) or fallbackreport - local report = (type(t[#t]) == "function" and t[#t]) or (texio and texio.write) or io.write - if e == nil then - report("\n") - elseif type(e) ~= "table" then - report(tostring(e)) - elseif e.tg then - report(tostring(e) .. "\n") - else - for i=1,#e do - report(tostring(e[i]) .. "\n") +apply_axis['parent'] = function(list) + local collected = { } + for l=1,#list do + local pl = list[l].__p__ + if pl then + collected[#collected+1] = pl end end + return collected end ---[[ldx-- -

An is converted to a table with instructions for traversing the -tree. However, simple cases are signaled by booleans. Because we don't know in -advance what we want to do with the found element the handle gets three arguments:

+apply_axis['attribute'] = function(list) + return { } +end - -r : the root element of the data table -d : the data table of the result -t : the index in the data table of the result - +apply_axis['namespace'] = function(list) + return { } +end -

Access to the root and data table makes it possible to construct insert and delete -functions.

---ldx]]-- - -local functions = xml.functions -local expressions = xml.expressions - -expressions.contains = string.find -expressions.find = string.find -expressions.upper = string.upper -expressions.lower = string.lower -expressions.number = tonumber -expressions.boolean = toboolean - -expressions.oneof = function(s,...) -- slow - local t = {...} for i=1,#t do if s == t[i] then return true end end return false -end - -expressions.error = function(str) - xml.error_handler("unknown function in lpath expression",str or "?") - return false -end - -functions.text = function(root,k,n) -- unchecked, maybe one deeper - local t = type(t) - if t == "string" then - return t - else -- todo n - local rdt = root.dt - return (rdt and rdt[k]) or root[k] or "" - end +apply_axis['following'] = function(list) -- incomplete +--~ local collected = { } +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni+1,#d do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ collected[#collected+1] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } +end + +apply_axis['preceding'] = function(list) -- incomplete +--~ local collected = { } +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni-1,1,-1 do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ collected[#collected+1] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } end -functions.name = function(d,k,n) -- ns + tg - local found = false - n = n or 0 - if not k then - -- not found - elseif n == 0 then - local dk = d[k] - found = dk and (type(dk) == "table") and dk - elseif n < 0 then - for i=k-1,1,-1 do - local di = d[i] - if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end - end - end - else - for i=k+1,#d,1 do +apply_axis['following-sibling'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni+1,#d do local di = d[i] if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end + collected[#collected+1] = di end end end - if found then - local ns, tg = found.rn or found.ns or "", found.tg - if ns ~= "" then - return ns .. ":" .. 
tg - else - return tg - end - else - return "" - end + return collected end -functions.tag = function(d,k,n) -- only tg - local found = false - n = n or 0 - if not k then - -- not found - elseif n == 0 then - local dk = d[k] - found = dk and (type(dk) == "table") and dk - elseif n < 0 then - for i=k-1,1,-1 do +apply_axis['preceding-sibling'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=1,ll.ni-1 do local di = d[i] if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end + collected[#collected+1] = di end end - else - for i=k+1,#d,1 do + end + return collected +end + +apply_axis['reverse-sibling'] = function(list) -- reverse preceding + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni-1,1,-1 do local di = d[i] if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end + collected[#collected+1] = di end end end - return (found and found.tg) or "" + return collected end -expressions.text = functions.text -expressions.name = functions.name -expressions.tag = functions.tag +apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self'] +apply_axis['auto-descendant'] = apply_axis['descendant'] +apply_axis['auto-child'] = apply_axis['child'] +apply_axis['auto-self'] = apply_axis['self'] +apply_axis['initial-child'] = apply_axis['child'] -local function traverse(root,pattern,handle,reverse,index,parent,wildcard) -- multiple only for tags, not for namespaces - if not root then -- error - return false - elseif pattern == false then -- root - handle(root,root.dt,root.ri) - return false - elseif pattern == true then -- wildcard - local rootdt = root.dt - if rootdt then - local start, stop, step = 1, #rootdt, 1 - if reverse then - start, stop, step = stop, start, -1 - end - for k=start,stop,step do - if handle(root,rootdt,root.ri or k) then return false end - if not traverse(rootdt[k],true,handle,reverse) then return false end - end - end - return false - elseif root.dt then - index = index or 1 - local action = pattern[index] - local command = action[1] - if command == 29 then -- fast case /oeps - local rootdt = root.dt - for k=1,#rootdt do - local e = rootdt[k] - local tg = e.tg - if e.tg then - local ns = e.rn or e.ns - local ns_a, tg_a = action[3], action[4] - local matched = (ns_a == "*" or ns == ns_a) and (tg_a == "*" or tg == tg_a) - if not action[2] then matched = not matched end - if matched then - if handle(root,rootdt,k) then return false end - end - end - end - elseif command == 11 then -- parent - local ep = root.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false +local function apply_nodes(list,directive,nodes) + -- todo: nodes[1] etc ... negated node name in set ... when needed + -- ... currently ignored + local maxn = #nodes + if maxn == 3 then --optimized loop + local nns, ntg = nodes[2], nodes[3] + if not nns and not ntg then -- wildcard + if directive then + return list + else + return { } end else - if (command == 16 or command == 12) and index == 1 then -- initial - -- wildcard = true - wildcard = command == 16 -- ok? 
- index = index + 1 - action = pattern[index] - command = action and action[1] or 0 -- something is wrong - end - if command == 11 then -- parent - local ep = root.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false - end - else - local rootdt = root.dt - local start, stop, step, n, dn = 1, #rootdt, 1, 0, 1 - if command == 30 then - if action[5] < 0 then - start, stop, step = stop, start, -1 - dn = -1 - end - elseif reverse and index == #pattern then - start, stop, step = stop, start, -1 - end - local idx = 0 - local hsh = { } -- this will slooow down the lot - for k=start,stop,step do -- we used to have functions for all but a case is faster - local e = rootdt[k] - local ns, tg = e.rn or e.ns, e.tg - if tg then - -- we can optimize this for simple searches, but it probably does not pay off - hsh[tg] = (hsh[tg] or 0) + 1 - idx = idx + 1 - if command == 30 then - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false + local collected, m, p = { }, 0, nil + if not nns then -- only check tag + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + if directive then + if ntg == ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - if not action[2] then matched = not matched end - if matched then - n = n + dn - if n == action[5] then - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - break - end - elseif wildcard then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end - end - else - local matched, multiple = false, false - if command == 20 then -- match - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - elseif command == 21 then -- match one of - multiple = true - for i=3,#action,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - elseif command == 22 then -- eq - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - matched = matched and e.at[action[6]] == action[7] - elseif command == 23 then -- ne - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = mached and e.at[action[6]] ~= action[7] - elseif command == 24 then -- one of eq - multiple = true - for i=3,#action-2,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[#action-1]] == action[#action] - elseif 
command == 25 then -- one of ne - multiple = true - for i=3,#action-2,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[#action-1]] ~= action[#action] - elseif command == 27 then -- has attribute - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[5]] - elseif command == 28 then -- has value - local edt, ns_a, tg_a = e.dt, action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = matched and edt and edt[1] == action[5] - elseif command == 31 then - local edt, ns_a, tg_a = e.dt, action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - if matched then - matched = action[6](expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1) - end - end - if matched then -- combine tg test and at test - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - if wildcard then - if multiple then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end - else - -- maybe or multiple; anyhow, check on (section|title) vs just section and title in example in lxml - if not traverse(e,pattern,handle,reverse,index,root) then return false end - end - end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - elseif command == 14 then -- any - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - elseif command == 15 then -- many - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root,true) then return false end - end - -- not here : 11 - elseif command == 11 then -- parent - local ep = e.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,root,index+1) then return false end - elseif handle(root,rootdt,k) then - return false - end - elseif command == 40 and e.special and tg == "@pi@" then -- pi - local pi = action[2] - if pi ~= "" then - local pt = e.dt[1] - if pt and pt:find(pi) then - if handle(root,rootdt,k) then - return false - end - end - elseif handle(root,rootdt,k) then - return false - end - elseif wildcard then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end + elseif ntg ~= ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + end + end + elseif not ntg then -- only check namespace + for l=1,#list do + local ll = list[l] + local lns = ll.rn or ll.ns + if lns then + if directive then + if lns == nns then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end + elseif lns ~= nns then + 
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - else - -- not here : 11 - if command == 11 then -- parent - local ep = e.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false + end + end + else -- check both + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = ltg == ntg and lns == nns + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - break -- else loop + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end end end end + return collected + end + else + local collected, m, p = { }, 0, nil + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = false + for n=1,maxn,3 do + local nns, ntg = nodes[n+1], nodes[n+2] + ok = (not ntg or ltg == ntg) and (not nns or lns == nns) + if ok then + break + end + end + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + end end + return collected end - return true end -xml.traverse = traverse +local quit_expression = false ---[[ldx-- -

Next come all kinds of locators and manipulators. The most generic function here -is xml.filter(root,pattern). All registered functions in the filters namespace -can be part of a search path, as in:

+local function apply_expression(list,expression,order) + local collected = { } + quit_expression = false + for l=1,#list do + local ll = list[l] + if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1 + collected[#collected+1] = ll + end + if quit_expression then + break + end + end + return collected +end + +local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb + +local spaces = S(" \n\r\t\f")^0 +local lp_space = S(" \n\r\t\f") +local lp_any = P(1) +local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") +local lp_doequal = P("=") / "==" +local lp_or = P("|") / " or " +local lp_and = P("&") / " and " + +local lp_builtin = P ( + P("firstindex") / "1" + + P("lastindex") / "(#ll.__p__.dt or 1)" + + P("firstelement") / "1" + + P("lastelement") / "(ll.__p__.en or 1)" + + P("first") / "1" + + P("last") / "#list" + + P("rootposition") / "order" + + P("position") / "l" + -- is element in finalizer + P("order") / "order" + + P("element") / "(ll.ei or 1)" + + P("index") / "(ll.ni or 1)" + + P("match") / "(ll.mi or 1)" + + P("text") / "(ll.dt[1] or '')" + + -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" + + P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" + + P("tag") / "ll.tg" + + P("ns") / "ll.ns" + ) * ((spaces * P("(") * spaces * P(")"))/"") + +local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])") +local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end) +local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end) +local lp_fastpos = lp_fastpos_n + lp_fastpos_p +local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false") + +local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling + return t .. "(" +end - -local r, d, k = xml.filter(root,"/a/b/c/position(4)" - ---ldx]]-- +local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling + if expressions[t] then + return "expr." .. t .. 
"(" + else + return "expr.error(" + end +end -local traverse, lpath, convert = xml.traverse, xml.lpath, xml.convert +local lparent = lpeg.P("(") +local rparent = lpeg.P(")") +local noparent = 1 - (lparent+rparent) +local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent} +local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"} -xml.filters = { } +local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')") +local lp_number = S("+-") * R("09")^1 +local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'") +local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"')) -function xml.filters.default(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end) - return dt and dt[dk], rt, dt, dk -end +local cleaner -function xml.filters.attributes(root,pattern,arguments) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end) - local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at) - if ekat then - if arguments then - return ekat[arguments] or "", rt, dt, dk +local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s) + if expressions[t] then + s = s and s ~= "" and lpegmatch(cleaner,s) + if s and s ~= "" then + return "expr." .. t .. "(ll," .. s ..")" else - return ekat, rt, dt, dk + return "expr." .. t .. "(ll)" end else - return { }, rt, dt, dk + return "expr.error(" .. t .. ")" end end -function xml.filters.reverse(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse') - return dt and dt[dk], rt, dt, dk -end +local content = + lp_builtin + + lp_attribute + + lp_special + + lp_noequal + lp_doequal + + lp_or + lp_and + + lp_reserved + + lp_lua_function + lp_function + + lp_content + -- too fragile + lp_child + + lp_any + +local converter = Cs ( + lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0 +) -function xml.filters.count(root,pattern,everything) - local n = 0 - traverse(root, lpath(pattern), function(r,d,t) - if everything or type(d[t]) == "table" then - n = n + 1 - end - end) - return n -end +cleaner = Cs ( ( +--~ lp_fastpos + + lp_reserved + + lp_number + + lp_string + +1 )^1 ) -function xml.filters.elements(root, pattern) -- == all - local t = { } - traverse(root, lpath(pattern), function(r,d,k) - local e = d[k] - if e then - t[#t+1] = e - end - end) - return t -end -function xml.filters.texts(root, pattern) - local t = { } - traverse(root, lpath(pattern), function(r,d,k) - local e = d[k] - if e and e.dt then - t[#t+1] = e.dt - end - end) - return t -end +--~ expr -function xml.filters.first(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end) - return dt and dt[dk], rt, dt, dk -end +local template_e = [[ + local expr = xml.expressions + return function(list,ll,l,order) + return %s + end +]] -function xml.filters.last(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse') - return dt and dt[dk], rt, dt, dk -end +local template_f_y = [[ + local finalizer = xml.finalizers['%s']['%s'] + return function(collection) + return finalizer(collection,%s) + end +]] -function xml.filters.index(root,pattern,arguments) - local rt, dt, dk, reverse, i = nil, nil, nil, false, tonumber(arguments or '1') or 1 - if i and i ~= 0 then 
- if i < 0 then - reverse, i = true, -i - end - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk, i = r, d, k, i-1 return i == 0 end, reverse) - if i == 0 then - return dt and dt[dk], rt, dt, dk - end +local template_f_n = [[ + return xml.finalizers['%s']['%s'] +]] + +-- + +local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] } +local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] } +local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] } +local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] } +local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] } +local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] } +local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] } +local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] } +local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] } +local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] } +local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] } +local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] } +local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] } +local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] } +local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] } + +local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] } +local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] } +local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] } +local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] } + +local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] } + +local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } } + +local skip = { } + +local function errorrunner_e(str,cnv) + if not skip[str] then + logs.report("lpath","error in expression: %s => %s",str,cnv) + skip[str] = cnv or str end - return nil, nil, nil, nil + return false +end +local function errorrunner_f(str,arg) + logs.report("lpath","error in finalizer: %s(%s)",str,arg or "") + return false end -function xml.filters.attribute(root,pattern,arguments) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end) - local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at) - return (ekat and (ekat[arguments] or ekat[gsub(arguments,"^([\"\'])(.*)%1$","%2")])) or "" +local function register_nodes(nodetest,nodes) + return { kind = "nodes", nodetest = nodetest, nodes = nodes } end -function xml.filters.text(root,pattern,arguments) -- ?? 
why index, tostring slow - local dtk, rt, dt, dk = xml.filters.index(root,pattern,arguments) - if dtk then -- n - local dtkdt = dtk.dt - if not dtkdt then - return "", rt, dt, dk - elseif #dtkdt == 1 and type(dtkdt[1]) == "string" then - return dtkdt[1], rt, dt, dk - else - return xml.tostring(dtkdt), rt, dt, dk - end +local function register_expression(expression) + local converted = lpegmatch(converter,expression) + local runner = loadstring(format(template_e,converted)) + runner = (runner and runner()) or function() errorrunner_e(expression,converted) end + return { kind = "expression", expression = expression, converted = converted, evaluator = runner } +end + +local function register_finalizer(protocol,name,arguments) + local runner + if arguments and arguments ~= "" then + runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments)) else - return "", rt, dt, dk + runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name)) end + runner = (runner and runner()) or function() errorrunner_f(name,arguments) end + return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner } end -function xml.filters.tag(root,pattern,n) - local tag = "" - traverse(root, lpath(pattern), function(r,d,k) - tag = xml.functions.tag(d,k,n and tonumber(n)) - return true - end) - return tag -end +local expression = P { "ex", + ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]", + sq = "'" * (1 - S("'"))^0 * "'", + dq = '"' * (1 - S('"'))^0 * '"', +} -function xml.filters.name(root,pattern,n) - local tag = "" - traverse(root, lpath(pattern), function(r,d,k) - tag = xml.functions.name(d,k,n and tonumber(n)) - return true - end) - return tag +local arguments = P { "ar", + ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")", + nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end, + sq = P("'") * (1 - P("'"))^0 * P("'"), + dq = P('"') * (1 - P('"'))^0 * P('"'), +} + +-- todo: better arg parser + +local function register_error(str) + return { kind = "error", error = format("unparsed: %s",str) } end ---[[ldx-- -

For splitting the filter function from the path specification, we can -use string matching or lpeg matching. Here the difference in speed is -negligible but the lpeg variant is more robust.
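As an editorial sketch (not part of the original text), the local lpeg splitter defined below separates such a composed pattern into a kind code, the search path, the filter name and its argument:

local kind, path, name, argument = parser:match("/a/b/c/position(4)")
-- kind == 1, path == "/a/b/c/", name == "position", argument == "4"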

---ldx]]-- +-- there is a difference in * and /*/ and so we need to catch a few special cases + +local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed +local special_2 = P("/") * Cc(register_auto_self) +local special_3 = P("") * Cc(register_auto_self) + +local parser = Ct { "patterns", -- can be made a bit faster by moving pattern outside + + patterns = spaces * V("protocol") * spaces * ( + ( V("special") * spaces * P(-1) ) + + ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 ) + ), + + protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"), + + -- the / is needed for // as descendant or self is somewhat special + -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + + axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") + + V("descendant_or_self") + V("following_sibling") + V("following") + + V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") + + #(1-P(-1)) * Cc(register_auto_child), + + special = special_1 + special_2 + special_3, --- not faster but hipper ... although ... i can't get rid of the trailing / in the path + initial = (P("/") * spaces * Cc(register_initial_child))^-1, -local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc + error = (P(1)^1) / register_error, -local slash = P('/') -local name = (R("az","AZ","--","__"))^1 -local path = C(((1-slash)^0 * slash)^1) -local argument = P { "(" * C(((1 - S("()")) + V(1))^0) * ")" } -local action = Cc(1) * path * C(name) * argument -local attribute = Cc(2) * path * P('@') * C(name) -local direct = Cc(3) * Cc("../*") * slash^0 * C(name) * argument + shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"), -local parser = direct + action + attribute + shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0, -local filters = xml.filters -local attribute_filter = xml.filters.attributes -local default_filter = xml.filters.default + s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus + -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self), + s_descendant = P("**") * Cc(register_descendant), + s_child = P("*") * #(1-P(":")) * Cc(register_child ), +-- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ), + s_parent = P("..") * Cc(register_parent ), + s_self = P("." 
) * Cc(register_self ), + s_root = P("^^") * Cc(register_root ), + s_ancestor = P("^") * Cc(register_ancestor ), --- todo: also hash, could be gc'd + descendant = P("descendant::") * Cc(register_descendant ), + child = P("child::") * Cc(register_child ), + parent = P("parent::") * Cc(register_parent ), + self = P("self::") * Cc(register_self ), + root = P('root::') * Cc(register_root ), + ancestor = P('ancestor::') * Cc(register_ancestor ), + descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ), + ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ), + -- attribute = P('attribute::') * Cc(register_attribute ), + -- namespace = P('namespace::') * Cc(register_namespace ), + following = P('following::') * Cc(register_following ), + following_sibling = P('following-sibling::') * Cc(register_following_sibling ), + preceding = P('preceding::') * Cc(register_preceding ), + preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ), + reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ), -function xml.filter(root,pattern) - local kind, a, b, c = parser:match(pattern) - if kind == 1 or kind == 3 then - return (filters[b] or default_filter)(root,a,c) - elseif kind == 2 then - return attribute_filter(root,a,b) + nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes, + + expressions = expression / register_expression, + + letters = R("az")^1, + name = (1-lpeg.S("/[]()|:*!"))^1, + negate = P("!") * Cc(false), + + nodefunction = V("negate") + P("not") * Cc(false) + Cc(true), + nodetest = V("negate") + Cc(true), + nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))), + wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")), + nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces, + + finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer, + +} + +local cache = { } + +local function nodesettostring(set,nodetest) + local t = { } + for i=1,#set,3 do + local directive, ns, tg = set[i], set[i+1], set[i+2] + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) + t[#t+1] = (directive and tg) or format("not(%s)",tg) + end + if nodetest == false then + return format("not(%s)",concat(t,"|")) else - return default_filter(root,pattern) + return concat(t,"|") end end ---~ slightly faster, but first we need a proper test file ---~ ---~ local hash = { } ---~ ---~ function xml.filter(root,pattern) ---~ local h = hash[pattern] ---~ if not h then ---~ local kind, a, b, c = parser:match(pattern) ---~ if kind == 1 then ---~ h = { kind, filters[b] or default_filter, a, b, c } ---~ elseif kind == 2 then ---~ h = { kind, attribute_filter, a, b, c } ---~ else ---~ h = { kind, default_filter, a, b, c } ---~ end ---~ hash[pattern] = h ---~ end ---~ local kind = h[1] ---~ if kind == 1 then ---~ return h[2](root,h[2],h[4]) ---~ elseif kind == 2 then ---~ return h[2](root,h[2],h[3]) ---~ else ---~ return h[2](root,pattern) ---~ end ---~ end +local function tagstostring(list) + if #list == 0 then + return "no elements" + else + local t = { } + for i=1, #list do + local li = list[i] + local ns, tg = li.ns, li.tg + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + t[#t+1] = (tg == "@rt@" and 
"[root]") or format("%s:%s",ns,tg) + end + return concat(t," ") + end +end ---[[ldx-- -

The following functions collect elements and texts.

---ldx]]-- +xml.nodesettostring = nodesettostring --- still somewhat bugged +local parse_pattern -- we have a harmless kind of circular reference -function xml.collect_elements(root, pattern, ignorespaces) - local rr, dd = { }, { } - traverse(root, lpath(pattern), function(r,d,k) - local dk = d and d[k] - if dk then - if ignorespaces and type(dk) == "string" and dk:find("[^%S]") then - -- ignore - else - local n = #rr+1 - rr[n], dd[n] = r, dk - end - end - end) - return dd, rr +local function lshow(parsed) + if type(parsed) == "string" then + parsed = parse_pattern(parsed) + end + local s = table.serialize_functions -- ugly + table.serialize_functions = false -- ugly + logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false)) + table.serialize_functions = s -- ugly end -function xml.collect_texts(root, pattern, flatten) - local t = { } -- no r collector - traverse(root, lpath(pattern), function(r,d,k) - if d then - local ek = d[k] - local tx = ek and ek.dt - if flatten then - if tx then - t[#t+1] = xml.tostring(tx) or "" +xml.lshow = lshow + +local function add_comment(p,str) + local pc = p.comment + if not pc then + p.comment = { str } + else + pc[#pc+1] = str + end +end + +parse_pattern = function (pattern) -- the gain of caching is rather minimal + lpathcalls = lpathcalls + 1 + if type(pattern) == "table" then + return pattern + else + local parsed = cache[pattern] + if parsed then + lpathcached = lpathcached + 1 + else + parsed = lpegmatch(parser,pattern) + if parsed then + parsed.pattern = pattern + local np = #parsed + if np == 0 then + parsed = { pattern = pattern, register_self, state = "parsing error" } + logs.report("lpath","parsing error in '%s'",pattern) + lshow(parsed) else - t[#t+1] = "" + -- we could have done this with a more complex parser but this + -- is cleaner + local pi = parsed[1] + if pi.axis == "auto-child" then + if false then + add_comment(parsed, "auto-child replaced by auto-descendant-or-self") + parsed[1] = register_auto_descendant_or_self + else + add_comment(parsed, "auto-child replaced by auto-descendant") + parsed[1] = register_auto_descendant + end + elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then + add_comment(parsed, "initial-child removed") -- we could also make it a auto-self + remove(parsed,1) + end + local np = #parsed -- can have changed + if np > 1 then + local pnp = parsed[np] + if pnp.kind == "nodes" and pnp.nodetest == true then + local nodes = pnp.nodes + if nodes[1] == true and nodes[2] == false and nodes[3] == false then + add_comment(parsed, "redundant final wildcard filter removed") + remove(parsed,np) + end + end + end end else - t[#t+1] = tx or "" + parsed = { pattern = pattern } + end + cache[pattern] = parsed + if trace_lparse and not trace_lprofile then + lshow(parsed) end - else - t[#t+1] = "" end - end) - return t + return parsed + end end -function xml.collect_tags(root, pattern, nonamespace) - local t = { } - xml.traverse(root, xml.lpath(pattern), function(r,d,k) - local dk = d and d[k] - if dk and type(dk) == "table" then - local ns, tg = e.ns, e.tg - if nonamespace then - t[#t+1] = tg -- if needed we can return an extra table - elseif ns == "" then - t[#t+1] = tg - else - t[#t+1] = ns .. ":" .. 
tg +-- we can move all calls inline and then merge the trace back +-- technically we can combine axis and the next nodes which is +-- what we did before but this a bit cleaner (but slower too) +-- but interesting is that it's not that much faster when we +-- go inline +-- +-- beware: we need to return a collection even when we filter +-- else the (simple) cache gets messed up + +-- caching found lookups saves not that much (max .1 sec on a 8 sec run) +-- and it also messes up finalizers + +-- watch out: when there is a finalizer, it's always called as there +-- can be cases that a finalizer returns (or does) something in case +-- there is no match; an example of this is count() + +local profiled = { } xml.profiled = profiled + +local function profiled_apply(list,parsed,nofparsed,order) + local p = profiled[parsed.pattern] + if p then + p.tested = p.tested + 1 + else + p = { tested = 1, matched = 0, finalized = 0 } + profiled[parsed.pattern] = p + end + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + p.matched = p.matched + 1 + p.finalized = p.finalized + 1 + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + p.finalized = p.finalized + 1 + return collected end + return nil end - end) - return #t > 0 and {} + end + if collected then + p.matched = p.matched + 1 + end + return collected +end + +local function traced_apply(list,parsed,nofparsed,order) + if trace_lparse then + lshow(parsed) + end + logs.report("lpath", "collecting : %s",parsed.pattern) + logs.report("lpath", " root tags : %s",tagstostring(list)) + logs.report("lpath", " order : %s",order or "unset") + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest)) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "") + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "") + return collected + end + return nil + end + end + return collected end ---[[ldx-- -

Often using an iterator looks nicer in the code than passing handler -functions. The book describes how to use coroutines for that -purpose (). This permits -code like:

- - -for r, d, k in xml.elements(xml.load('text.xml'),"title") do - print(d[k]) +local function normal_apply(list,parsed,nofparsed,order) + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + local axis = pi.axis + if axis ~= "self" then + collected = apply_axis[axis](collected) + end + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + return pi.finalizer(collected) + end + if not collected or #collected == 0 then + local pf = i < nofparsed and parsed[nofparsed].finalizer + if pf then + return pf(collected) -- can be anything + end + return nil + end + end + return collected end - -

This will print all the titles in the document. The iterator variant takes -1.5 times the runtime of the function variant, which is due to the overhead in -creating the wrapper. So, instead of:

- - -function xml.filters.first(root,pattern) - for rt,dt,dk in xml.elements(root,pattern) - return dt and dt[dk], rt, dt, dk +local function parse_apply(list,pattern) + -- we avoid an extra call + local parsed = cache[pattern] + if parsed then + lpathcalls = lpathcalls + 1 + lpathcached = lpathcached + 1 + elseif type(pattern) == "table" then + lpathcalls = lpathcalls + 1 + parsed = pattern + else + parsed = parse_pattern(pattern) or pattern + end + if not parsed then + return + end + local nofparsed = #parsed + if nofparsed == 0 then + return -- something is wrong + end + local one = list[1] + if not one then + return -- something is wrong + elseif not trace_lpath then + return normal_apply(list,parsed,nofparsed,one.mi) + elseif trace_lprofile then + return profiled_apply(list,parsed,nofparsed,one.mi) + else + return traced_apply(list,parsed,nofparsed,one.mi) end - return nil, nil, nil, nil end - - -

We use the function variants in the filters.

---ldx]]-- -local wrap, yield = coroutine.wrap, coroutine.yield +-- internal (parsed) -function xml.elements(root,pattern,reverse) - return wrap(function() traverse(root, lpath(pattern), yield, reverse) end) +expressions.child = function(e,pattern) + return parse_apply({ e },pattern) -- todo: cache end - -function xml.elements_only(root,pattern,reverse) - return wrap(function() traverse(root, lpath(pattern), function(r,d,k) yield(d[k]) end, reverse) end) +expressions.count = function(e,pattern) + local collected = parse_apply({ e },pattern) -- todo: cache + return (collected and #collected) or 0 end -function xml.each_element(root, pattern, handle, reverse) - local ok - traverse(root, lpath(pattern), function(r,d,k) ok = true handle(r,d,k) end, reverse) - return ok +-- external + +expressions.oneof = function(s,...) -- slow + local t = {...} for i=1,#t do if s == t[i] then return true end end return false +end +expressions.error = function(str) + xml.error_handler("unknown function in lpath expression",tostring(str or "?")) + return false +end +expressions.undefined = function(s) + return s == nil end -function xml.process_elements(root, pattern, handle) - traverse(root, lpath(pattern), function(r,d,k) - local dkdt = d[k].dt - if dkdt then - for i=1,#dkdt do - local v = dkdt[i] - if v.tg then handle(v) end - end - end - end) +expressions.quit = function(s) + if s or s == nil then + quit_expression = true + end + return true end -function xml.process_attributes(root, pattern, handle) - traverse(root, lpath(pattern), function(r,d,k) - local ek = d[k] - local a = ek.at or { } - handle(a) - if next(a) then -- next is faster than type (and >0 test) - ek.at = a - else - ek.at = nil - end - end) +expressions.print = function(...) + print(...) + return true end ---[[ldx-- -
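-- Editorial sketch (not part of the patch): the functions registered in the
-- expressions table, here and a bit further on, can be used inside [..]
-- predicates; the pattern strings below are taken from the commented test
-- cases elsewhere in this file.
--
--   xml.filter(x,"b[number(@n)>2 and number(@n)<6]")
--   xml.filter(x,"b[find(text(),'ALSO')]")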

We've now arrived at the functions that manipulate the tree.

---ldx]]-- +expressions.contains = find +expressions.find = find +expressions.upper = upper +expressions.lower = lower +expressions.number = tonumber +expressions.boolean = toboolean -function xml.inject_element(root, pattern, element, prepend) - if root and element then - local matches, collect = { }, nil - if type(element) == "string" then - element = convert(element,true) - end - if element then - collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end - traverse(root, lpath(pattern), collect) - for i=1,#matches do - local m = matches[i] - local r, d, k, element, edt = m[1], m[2], m[3], m[4], nil - if element.ri then - element = element.dt[element.ri].dt - else - element = element.dt - end - if r.ri then - edt = r.dt[r.ri].dt - else - edt = d and d[k] and d[k].dt - end - if edt then - local be, af - if prepend then - be, af = xml.copy(element), edt - else - be, af = edt, xml.copy(element) - end - for i=1,#af do - be[#be+1] = af[i] - end - if r.ri then - r.dt[r.ri].dt = be - else - d[k].dt = be - end - else - -- r.dt = element.dt -- todo - end - end +-- user interface + +local function traverse(root,pattern,handle) + logs.report("xml","use 'xml.selection' instead for '%s'",pattern) + local collected = parse_apply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + handle(r,r.dt,e.ni) end end end --- todo: copy ! - -function xml.insert_element(root, pattern, element, before) -- todo: element als functie - if root and element then - if pattern == "/" then - xml.inject_element(root, pattern, element, before) - else - local matches, collect = { }, nil - if type(element) == "string" then - element = convert(element,true) - end - if element and element.ri then - element = element.dt[element.ri] - end - if element then - collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end - traverse(root, lpath(pattern), collect) - for i=#matches,1,-1 do - local m = matches[i] - local r, d, k, element = m[1], m[2], m[3], m[4] - if not before then k = k + 1 end - if element.tg then - insert(d,k,element) -- untested ---~ elseif element.dt then ---~ for _,v in ipairs(element.dt) do -- i added ---~ insert(d,k,v) ---~ k = k + 1 ---~ end ---~ end - else - local edt = element.dt - if edt then - for i=1,#edt do - insert(d,k,edt[i]) - k = k + 1 - end - end - end - end +local function selection(root,pattern,handle) + local collected = parse_apply({ root },pattern) + if collected then + if handle then + for c=1,#collected do + handle(collected[c]) end + else + return collected end end end -xml.insert_element_after = xml.insert_element -xml.insert_element_before = function(r,p,e) xml.insert_element(r,p,e,true) end -xml.inject_element_after = xml.inject_element -xml.inject_element_before = function(r,p,e) xml.inject_element(r,p,e,true) end +xml.parse_parser = parser +xml.parse_pattern = parse_pattern +xml.parse_apply = parse_apply +xml.traverse = traverse -- old method, r, d, k +xml.selection = selection -- new method, simple handle -function xml.delete_element(root, pattern) - local matches, deleted = { }, { } - local collect = function(r,d,k) matches[#matches+1] = { r, d, k } end - traverse(root, lpath(pattern), collect) - for i=#matches,1,-1 do - local m = matches[i] - deleted[#deleted+1] = remove(m[2],m[3]) - end - return deleted +local lpath = parse_pattern + +xml.lpath = lpath + +function xml.cached_patterns() + return cache end -function xml.replace_element(root, pattern, element) - if type(element) == "string" then - 
element = convert(element,true) - end - if element and element.ri then - element = element.dt[element.ri] - end - if element then - traverse(root, lpath(pattern), function(rm, d, k) - d[k] = element.dt -- maybe not clever enough - end) +-- generic function finalizer (independant namespace) + +local function dofunction(collected,fnc) + if collected then + local f = functions[fnc] + if f then + for c=1,#collected do + f(collected[c]) + end + else + logs.report("xml","unknown function '%s'",fnc) + end end end -local function load_data(name) -- == io.loaddata - local f, data = io.open(name), "" - if f then - data = f:read("*all",'b') -- 'b' ? - f:close() - end - return data +xml.finalizers.xml["function"] = dofunction +xml.finalizers.tex["function"] = dofunction + +-- functions + +expressions.text = function(e,n) + local rdt = e.__p__.dt + return (rdt and rdt[n]) or "" end -function xml.include(xmldata,pattern,attribute,recursive,loaddata) - -- parse="text" (default: xml), encoding="" (todo) - -- attribute = attribute or 'href' - pattern = pattern or 'include' - loaddata = loaddata or load_data - local function include(r,d,k) - local ek, name = d[k], nil - if not attribute or attribute == "" then - local ekdt = ek.dt - name = (type(ekdt) == "table" and ekdt[1]) or ekdt - end - if not name then - if ek.at then - for a in gmatch(attribute or "href","([^|]+)") do - name = ek.at[a] - if name then break end +expressions.name = function(e,n) -- ns + tg + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = type(e) == "table" and e + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break + else + n = n + 1 end end end - local data = (name and name ~= "" and loaddata(name)) or "" - if data == "" then - xml.empty(d,k) - elseif ek.at["parse"] == "text" then -- for the moment hard coded - d[k] = xml.escaped(data) - else - local xi = xml.convert(data) - if not xi then - xml.empty(d,k) - else - if recursive then - xml.include(xi,pattern,attribute,recursive,loaddata) + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 end - xml.assign(d,k,xi) end end end - xml.each_element(xmldata, pattern, include) + if found then + local ns, tg = found.rn or found.ns or "", found.tg + if ns ~= "" then + return ns .. ":" .. tg + else + return tg + end + else + return "" + end end -function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space ! 
- traverse(root, lpath(pattern), function(r,d,k) - local dkdt = d[k].dt - if dkdt then -- can be optimized - local t = { } - for i=1,#dkdt do - local str = dkdt[i] - if type(str) == "string" then - - if str == "" then - -- stripped +expressions.tag = function(e,n) -- only tg + if not e then + return "" + else + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = (type(e) == "table") and e -- seems to fail + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break else - if nolines then - str = gsub(str,"[ \n\r\t]+"," ") - end - if str == "" then - -- stripped - else - t[#t+1] = str - end + n = n + 1 end - else - t[#t+1] = str end end - d[k].dt = t - end - end) -end - -local function rename_space(root, oldspace, newspace) -- fast variant - local ndt = #root.dt - for i=1,ndt or 0 do - local e = root[i] - if type(e) == "table" then - if e.ns == oldspace then - e.ns = newspace - if e.rn then - e.rn = newspace + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 + end end end - local edt = e.dt - if edt then - rename_space(edt, oldspace, newspace) - end end + return (found and found.tg) or "" end end -xml.rename_space = rename_space +--[[ldx-- +

This is the main filter function. It returns whatever is asked for.
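A small usage sketch (an editorial addition, assuming a tree obtained from xml.convert or xml.load): the result is either the collected elements or, when the path ends in a finalizer, whatever that finalizer returns; a finalizer also runs on an empty match, which is why count() can report zero.

local root   = xml.convert("<a><b n='1'/><b n='2'/></a>")
local list   = xml.filter(root,"b")           -- a table with both b elements
local second = xml.filter(root,"b[@n=='2']")  -- filtered by an expression
local n      = xml.filter(root,"b/count()")   -- presumably 2, via the count finalizer

xml.selection(root,"b",function(e) print(xml.tostring(e)) end) -- handler driven variant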

+--ldx]]-- -function xml.remap_tag(root, pattern, newtg) - traverse(root, lpath(pattern), function(r,d,k) - d[k].tg = newtg - end) -end -function xml.remap_namespace(root, pattern, newns) - traverse(root, lpath(pattern), function(r,d,k) - d[k].ns = newns - end) +function xml.filter(root,pattern) -- no longer funny attribute handling here + return parse_apply({ root },pattern) end -function xml.check_namespace(root, pattern, newns) - traverse(root, lpath(pattern), function(r,d,k) - local dk = d[k] - if (not dk.rn or dk.rn == "") and dk.ns == "" then - dk.rn = newns - end - end) + +--[[ldx-- +

Often using an iterator looks nicer in the code than passing handler functions. The book describes how to use coroutines for that purpose (). This permits code like:

+ + +for r, d, k in xml.elements(xml.load('text.xml'),"title") do + print(d[k]) -- old method end -function xml.remap_name(root, pattern, newtg, newns, newrn) - traverse(root, lpath(pattern), function(r,d,k) - local dk = d[k] - dk.tg = newtg - dk.ns = newns - dk.rn = newrn - end) +for e in xml.collected(xml.load('text.xml'),"title") do + print(e) -- new one end + +--ldx]]-- -function xml.filters.found(root,pattern,check_content) - local found = false - traverse(root, lpath(pattern), function(r,d,k) - if check_content then - local dk = d and d[k] - found = dk and dk.dt and next(dk.dt) and true +local wrap, yield = coroutine.wrap, coroutine.yield + +function xml.elements(root,pattern,reverse) -- r, d, k + local collected = parse_apply({ root },pattern) + if collected then + if reverse then + return wrap(function() for c=#collected,1,-1 do + local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) + end end) else - found = true + return wrap(function() for c=1,#collected do + local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) + end end) end - return true - end) - return found + end + return wrap(function() end) end ---[[ldx-- -

Here are a few synonyms.

---ldx]]-- +function xml.collected(root,pattern,reverse) -- e + local collected = parse_apply({ root },pattern) + if collected then + if reverse then + return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end) + else + return wrap(function() for c=1,#collected do yield(collected[c]) end end) + end + end + return wrap(function() end) +end -xml.filters.position = xml.filters.index -xml.count = xml.filters.count -xml.index = xml.filters.index -xml.position = xml.filters.index -xml.first = xml.filters.first -xml.last = xml.filters.last -xml.found = xml.filters.found +end -- of closure -xml.each = xml.each_element -xml.process = xml.process_element -xml.strip = xml.strip_whitespace -xml.collect = xml.collect_elements -xml.all = xml.collect_elements +do -- create closure to overcome 200 locals limit -xml.insert = xml.insert_element_after -xml.inject = xml.inject_element_after -xml.after = xml.insert_element_after -xml.before = xml.insert_element_before -xml.delete = xml.delete_element -xml.replace = xml.replace_element +if not modules then modules = { } end modules ['lxml-mis'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local concat = table.concat +local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring +local format, gsub, match = string.format, string.gsub, string.match +local lpegmatch = lpeg.match --[[ldx-- -

The following helper functions best belong to the lmxl-ini +

The following helper functions best belong to the lxml-ini module. Some are here because we need them in the mk document and other manuals, others came up when playing with this module. Since this module is also used in we've put them here instead of loading more modules there than needed.
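An editorial sketch of two of the helpers defined below (names as they appear in this listing): the escape helpers translate the special characters to and from entity form, and xml.fillin provides a default text for an otherwise empty element.

local s = xml.escaped("1 < 2 & 3")  -- entities substituted for the special characters
print(xml.unescaped(s))             -- back to: 1 < 2 & 3

local root = xml.convert("<doc><author/></doc>")
xml.fillin(root,"author","anonymous",true) -- only fills in when the element is empty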

--ldx]]-- -function xml.gsub(t,old,new) +local function xmlgsub(t,old,new) -- will be replaced local dt = t.dt if dt then for k=1,#dt do @@ -5069,28 +6366,26 @@ function xml.gsub(t,old,new) if type(v) == "string" then dt[k] = gsub(v,old,new) else - xml.gsub(v,old,new) + xmlgsub(v,old,new) end end end end +--~ xml.gsub = xmlgsub + function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual - if d and k and d[k-1] and type(d[k-1]) == "string" then - local s = d[k-1]:match("\n(%s+)") - xml.gsub(dk,"\n"..string.rep(" ",#s),"\n") + if d and k then + local dkm = d[k-1] + if dkm and type(dkm) == "string" then + local s = match(dkm,"\n(%s+)") + xmlgsub(dk,"\n"..rep(" ",#s),"\n") + end end end -function xml.serialize_path(root,lpath,handle) - local dk, r, d, k = xml.first(root,lpath) - dk = xml.copy(dk) - xml.strip_leading_spaces(dk,d,k) - xml.serialize(dk,handle) -end - --~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end +--~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end --~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end --~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end @@ -5114,8 +6409,6 @@ local escaped = Cs(normal * (special * normal)^0) -- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) --- unescaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) --- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0) local normal = (1 - S"&")^0 local special = P("<")/"<" + P(">")/">" + P("&")/"&" local unescaped = Cs(normal * (special * normal)^0) @@ -5124,84 +6417,32 @@ local unescaped = Cs(normal * (special * normal)^0) local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) -function xml.escaped (str) return escaped :match(str) end -function xml.unescaped(str) return unescaped:match(str) end -function xml.cleansed (str) return cleansed :match(str) end +xml.escaped_pattern = escaped +xml.unescaped_pattern = unescaped +xml.cleansed_pattern = cleansed -function xml.join(t,separator,lastseparator) - if #t > 0 then - local result = { } - for k,v in pairs(t) do - result[k] = xml.tostring(v) - end - if lastseparator then - return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. 
result[#result] - else - return concat(result,separator) +function xml.escaped (str) return lpegmatch(escaped,str) end +function xml.unescaped(str) return lpegmatch(unescaped,str) end +function xml.cleansed (str) return lpegmatch(cleansed,str) end + +-- this might move + +function xml.fillin(root,pattern,str,check) + local e = xml.first(root,pattern) + if e then + local n = #e.dt + if not check or n == 0 or (n == 1 and e.dt[1] == "") then + e.dt = { str } end - else - return "" end end -function xml.statistics() - return { - lpathcalls = lpathcalls, - lpathcached = lpathcached, - } -end - --- xml.set_text_cleanup(xml.show_text_entities) --- xml.set_text_cleanup(xml.resolve_text_entities) - ---~ xml.lshow("/../../../a/(b|c)[@d='e']/f") ---~ xml.lshow("/../../../a/!(b|c)[@d='e']/f") ---~ xml.lshow("/../../../a/!b[@d!='e']/f") - ---~ x = xml.convert([[ ---~ ---~ 01 ---~ 02 ---~ 03 ---~ OK ---~ 05 ---~ 06 ---~ ALSO OK ---~ ---~ ]]) - ---~ xml.settrace("lpath",true) - ---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == 'ok']")) ---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == upper('ok')]")) ---~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']")) ---~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]")) ---~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]")) - ---~ str = [[ ---~ ---~ ---~ my secret ---~ ---~ ]] - ---~ x = xml.convert([[ ---~ 0102xx03OK ---~ ]]) ---~ xml.xshow(xml.first(x,"b[tag(2) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(1) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(-1) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(-2) == 'x']")) - ---~ print(xml.filter(x,"b/tag(2)")) ---~ print(xml.filter(x,"b/tag(1)")) - end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['lxml-ent'] = { +if not modules then modules = { } end modules ['lxml-aux'] = { version = 1.001, comment = "this module is the basis for the lxml-* ones", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", @@ -5209,457 +6450,836 @@ if not modules then modules = { } end modules ['lxml-ent'] = { license = "see context related readme files" } -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub, find = string.format, string.gsub, string.find -local utfchar = unicode.utf8.char +-- not all functions here make sense anymore vbut we keep them for +-- compatibility reasons ---[[ldx-- -

We provide (at least here) two entity handlers. The more extensive -resolver consults a hash first, tries to convert to next, -and finally calls a handler when defined. When this all fails, the -original entity is returned.
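An editorial sketch of the fallback chain implemented by the resolver below, assuming the default entity handler is in place:

print(xml.resolve_text_entities("&#x41; and &unknown;"))
-- presumably prints "A and [unknown]": a hex reference, then the handler fallback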

---ldx]]-- +local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end) -xml.entities = xml.entities or { } -- xml.entity_handler == function +local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name +local xmlinheritedconvert = xml.inheritedconvert -function xml.entity_handler(e) - return format("[%s]",e) -end +local type = type +local insert, remove = table.insert, table.remove +local gmatch, gsub = string.gmatch, string.gsub -local function toutf(s) - return utfchar(tonumber(s,16)) +local function report(what,pattern,c,e) + logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern) end -local function utfize(root) - local d = root.dt - for k=1,#d do - local dk = d[k] - if type(dk) == "string" then - -- test prevents copying if no match - if find(dk,"&#x.-;") then - d[k] = gsub(dk,"&#x(.-);",toutf) +local function withelements(e,handle,depth) + if e and handle then + local edt = e.dt + if edt then + depth = depth or 0 + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + handle(e,depth) + withelements(e,handle,depth+1) + end end - else - utfize(dk) end end end -xml.utfize = utfize +xml.withelements = withelements -local function resolve(e) -- hex encoded always first, just to avoid mkii fallbacks - if find(e,"^#x") then - return utfchar(tonumber(e:sub(3),16)) - elseif find(e,"^#") then - return utfchar(tonumber(e:sub(2))) - else - local ee = xml.entities[e] -- we cannot shortcut this one (is reloaded) - if ee then - return ee - else - local h = xml.entity_handler - return (h and h(e)) or "&" .. e .. ";" +function xml.withelement(e,n,handle) -- slow + if e and n ~= 0 and handle then + local edt = e.dt + if edt then + if n > 0 then + for i=1,#edt do + local ei = edt[i] + if type(ei) == "table" then + if n == 1 then + handle(ei) + return + else + n = n - 1 + end + end + end + elseif n < 0 then + for i=#edt,1,-1 do + local ei = edt[i] + if type(ei) == "table" then + if n == -1 then + handle(ei) + return + else + n = n + 1 + end + end + end + end end end end -local function resolve_entities(root) - if not root.special or root.tg == "@rt@" then - local d = root.dt - for k=1,#d do - local dk = d[k] - if type(dk) == "string" then - if find(dk,"&.-;") then - d[k] = gsub(dk,"&(.-);",resolve) - end - else - resolve_entities(dk) +xml.elements_only = xml.collected + +function xml.each_element(root,pattern,handle,reverse) + local collected = xmlparseapply({ root },pattern) + if collected then + if reverse then + for c=#collected,1,-1 do + handle(collected[c]) + end + else + for c=1,#collected do + handle(collected[c]) end end + return collected end end -xml.resolve_entities = resolve_entities +xml.process_elements = xml.each_element -function xml.utfize_text(str) - if find(str,"&#") then - return (gsub(str,"&#x(.-);",toutf)) - else - return str +function xml.process_attributes(root,pattern,handle) + local collected = xmlparseapply({ root },pattern) + if collected and handle then + for c=1,#collected do + handle(collected[c].at) + end end + return collected end -function xml.resolve_text_entities(str) -- maybe an lpeg. maybe resolve inline - if find(str,"&") then - return (gsub(str,"&(.-);",resolve)) - else - return str - end +--[[ldx-- +

The following functions collect elements and texts.
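A short editorial example of the collectors defined below, assuming a converted tree in root:

local elements = xml.collect_elements(root,"b")   -- the raw collection
local texts    = xml.collect_texts(root,"b",true) -- flattened to strings
local tags     = xml.collect_tags(root,"b",true)  -- tag names without namespace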

+--ldx]]-- + +-- are these still needed -> lxml-cmp.lua + +function xml.collect_elements(root, pattern) + return xmlparseapply({ root },pattern) end -function xml.show_text_entities(str) - if find(str,"&") then - return (gsub(str,"&(.-);","[%1]")) - else - return str +function xml.collect_texts(root, pattern, flatten) -- todo: variant with handle + local collected = xmlparseapply({ root },pattern) + if collected and flatten then + local xmltostring = xml.tostring + for c=1,#collected do + collected[c] = xmltostring(collected[c].dt) + end end + return collected or { } end --- experimental, this will be done differently - -function xml.merge_entities(root) - local documententities = root.entities - local allentities = xml.entities - if documententities then - for k, v in next, documententities do - allentities[k] = v +function xml.collect_tags(root, pattern, nonamespace) + local collected = xmlparseapply({ root },pattern) + if collected then + local t = { } + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace then + t[#t+1] = tg + elseif ns == "" then + t[#t+1] = tg + else + t[#t+1] = ns .. ":" .. tg + end end + return t end end +--[[ldx-- +

We've now arrived at the functions that manipulate the tree.
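A brief editorial sketch of the manipulation functions that follow; the new content passed to replace, insert and inject may be an element or a string that gets converted on the fly:

xml.delete_element(root,"b[@obsolete=='yes']")
xml.replace_element(root,"error","<warning/>")
xml.insert_element_before(root,"chapter","<hr/>")
xml.inject_element(root,"titlepage","<logo/>") -- appended inside the matched element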

+--ldx]]-- -end -- of closure +local no_root = { no_root = true } -do -- create closure to overcome 200 locals limit +function xml.redo_ni(d) + for k=1,#d do + local dk = d[k] + if type(dk) == "table" then + dk.ni = k + end + end +end -if not modules then modules = { } end modules ['lxml-mis'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} +local function xmltoelement(whatever,root) + if not whatever then + return nil + end + local element + if type(whatever) == "string" then + element = xmlinheritedconvert(whatever,root) + else + element = whatever -- we assume a table + end + if element.error then + return whatever -- string + end + if element then + --~ if element.ri then + --~ element = element.dt[element.ri].dt + --~ else + --~ element = element.dt + --~ end + end + return element +end -local concat = table.concat -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub = string.format, string.gsub +xml.toelement = xmltoelement ---[[ldx-- -

The following helper functions best belong to the lmxl-ini -module. Some are here because we need then in the mk -document and other manuals, others came up when playing with -this module. Since this module is also used in we've -put them here instead of loading mode modules there then needed.

---ldx]]-- +local function copiedelement(element,newparent) + if type(element) == "string" then + return element + else + element = xmlcopy(element).dt + if newparent and type(element) == "table" then + element.__p__ = newparent + end + return element + end +end -function xml.gsub(t,old,new) - local dt = t.dt - if dt then - for k=1,#dt do - local v = dt[k] - if type(v) == "string" then - dt[k] = gsub(v,old,new) - else - xml.gsub(v,old,new) +function xml.delete_element(root,pattern) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('deleting',pattern,c,e) + end + local d = p.dt + remove(d,e.ni) + xml.redo_ni(d) -- can be made faster and inlined end end end end -function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual - if d and k and d[k-1] and type(d[k-1]) == "string" then - local s = d[k-1]:match("\n(%s+)") - xml.gsub(dk,"\n"..string.rep(" ",#s),"\n") +function xml.replace_element(root,pattern,whatever) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('replacing',pattern,c,e) + end + local d = p.dt + d[e.ni] = copiedelement(element,p) + xml.redo_ni(d) -- probably not needed + end + end end end -function xml.serialize_path(root,lpath,handle) - local dk, r, d, k = xml.first(root,lpath) - dk = xml.copy(dk) - xml.strip_leading_spaces(dk,d,k) - xml.serialize(dk,handle) +local function inject_element(root,pattern,whatever,prepend) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + local d, k, rri = r.dt, e.ni, r.ri + local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt) + if edt then + local be, af + local cp = copiedelement(element,e) + if prepend then + be, af = cp, edt + else + be, af = edt, cp + end + for i=1,#af do + be[#be+1] = af[i] + end + if rri then + r.dt[rri].dt = be + else + d[k].dt = be + end + xml.redo_ni(d) + end + end + end end ---~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end +local function insert_element(root,pattern,whatever,before) -- todo: element als functie + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + local d, k = r.dt, e.ni + if not before then + k = k + 1 + end + insert(d,k,copiedelement(element,r)) + xml.redo_ni(d) + end + end +end ---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end ---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end ---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>" +xml.insert_element = insert_element +xml.insert_element_after = insert_element +xml.insert_element_before = function(r,p,e) insert_element(r,p,e,true) end +xml.inject_element = inject_element +xml.inject_element_after = inject_element +xml.inject_element_before = function(r,p,e) inject_element(r,p,e,true) end -local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs +local function 
include(xmldata,pattern,attribute,recursive,loaddata) + -- parse="text" (default: xml), encoding="" (todo) + -- attribute = attribute or 'href' + pattern = pattern or 'include' + loaddata = loaddata or io.loaddata + local collected = xmlparseapply({ xmldata },pattern) + if collected then + for c=1,#collected do + local ek = collected[c] + local name = nil + local ekdt = ek.dt + local ekat = ek.at + local epdt = ek.__p__.dt + if not attribute or attribute == "" then + name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- ckeck, probably always tab or str + end + if not name then + for a in gmatch(attribute or "href","([^|]+)") do + name = ekat[a] + if name then break end + end + end + local data = (name and name ~= "" and loaddata(name)) or "" + if data == "" then + epdt[ek.ni] = "" -- xml.empty(d,k) + elseif ekat["parse"] == "text" then + -- for the moment hard coded + epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data) + else +--~ local settings = xmldata.settings +--~ settings.parent_root = xmldata -- to be tested +--~ local xi = xmlconvert(data,settings) + local xi = xmlinheritedconvert(data,xmldata) + if not xi then + epdt[ek.ni] = "" -- xml.empty(d,k) + else + if recursive then + include(xi,pattern,attribute,recursive,loaddata) + end + epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi) + end + end + end + end +end --- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg --- --- 1021:0335:0287:0247 +xml.include = include --- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ" --- --- 1559:0257:0288:0190 (last one suggested by roberto) +--~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away +--~ local collected = xmlparseapply({ xmldata },pattern) +--~ if collected then +--~ local xmltostring = xml.tostring +--~ for c=1,#collected do +--~ local e = collected[c] +--~ local data = manipulator(xmltostring(e)) +--~ if data == "" then +--~ epdt[e.ni] = "" +--~ else +--~ local xi = xmlinheritedconvert(data,xmldata) +--~ if not xi then +--~ epdt[e.ni] = "" +--~ else +--~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi) +--~ end +--~ end +--~ end +--~ end +--~ end --- escaped = Cs((S("<&>") / xml.escapes + 1)^0) --- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) -local normal = (1 - S("<&>"))^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local escaped = Cs(normal * (special * normal)^0) +--~ xml.manipulate = manipulate --- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) +function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space ! 
+ local collected = xmlparseapply({ root },pattern) + if collected then + for i=1,#collected do + local e = collected[i] + local edt = e.dt + if edt then + local t = { } + for i=1,#edt do + local str = edt[i] + if type(str) == "string" then + if str == "" then + -- stripped + else + if nolines then + str = gsub(str,"[ \n\r\t]+"," ") + end + if str == "" then + -- stripped + else + t[#t+1] = str + end + end + else + --~ str.ni = i + t[#t+1] = str + end + end + e.dt = t + end + end + end +end --- unescaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) --- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0) -local normal = (1 - S"&")^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local unescaped = Cs(normal * (special * normal)^0) +function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing + local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now + if collected then + for i=1,#collected do + local e = collected[i] + local edt = e.dt + if edt then + if anywhere then + local t = { } + for e=1,#edt do + local str = edt[e] + if type(str) ~= "string" then + t[#t+1] = str + elseif str ~= "" then + -- todo: lpeg for each case + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s*(.-)%s*$","%1") + if str ~= "" then + t[#t+1] = str + end + end + end + e.dt = t + else + -- we can assume a regular sparse xml table with no successive strings + -- otherwise we should use a while loop + if #edt > 0 then + -- strip front + local str = edt[1] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt,1) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s+","") + if str == "" then + remove(edt,1) + else + edt[1] = str + end + end + end + if #edt > 1 then + -- strip end + local str = edt[#edt] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"%s+$","") + if str == "" then + remove(edt) + else + edt[#edt] = str + end + end + end + end + end + end + end +end --- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference) +local function rename_space(root, oldspace, newspace) -- fast variant + local ndt = #root.dt + for i=1,ndt or 0 do + local e = root[i] + if type(e) == "table" then + if e.ns == oldspace then + e.ns = newspace + if e.rn then + e.rn = newspace + end + end + local edt = e.dt + if edt then + rename_space(edt, oldspace, newspace) + end + end + end +end -local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) +xml.rename_space = rename_space -xml.escaped_pattern = escaped -xml.unescaped_pattern = unescaped -xml.cleansed_pattern = cleansed +function xml.remap_tag(root, pattern, newtg) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + collected[c].tg = newtg + end + end +end -function xml.escaped (str) return escaped :match(str) end -function xml.unescaped(str) return unescaped:match(str) end -function xml.cleansed (str) return cleansed :match(str) end +function xml.remap_namespace(root, pattern, newns) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + collected[c].ns = newns + end + end +end -function xml.join(t,separator,lastseparator) - if #t > 0 then - local result = { } - for k,v in pairs(t) do - result[k] = xml.tostring(v) +function 
xml.check_namespace(root, pattern, newns) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + if (not e.rn or e.rn == "") and e.ns == "" then + e.rn = newns + end end - if lastseparator then - return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. result[#result] - else - return concat(result,separator) + end +end + +function xml.remap_name(root, pattern, newtg, newns, newrn) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + e.tg, e.ns, e.rn = newtg, newns, newrn end - else - return "" end end +--[[ldx-- +

<p>Here are a few synonyms.</p>
+--ldx]]-- + +xml.each = xml.each_element +xml.process = xml.process_element +xml.strip = xml.strip_whitespace +xml.collect = xml.collect_elements +xml.all = xml.collect_elements + +xml.insert = xml.insert_element_after +xml.inject = xml.inject_element_after +xml.after = xml.insert_element_after +xml.before = xml.insert_element_before +xml.delete = xml.delete_element +xml.replace = xml.replace_element + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['trac-tra'] = { +if not modules then modules = { } end modules ['lxml-xml'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "this module is the basis for the lxml-* ones", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } --- the tag is kind of generic and used for functions that are not --- bound to a variable, like node.new, node.copy etc (contrary to for instance --- node.has_attribute which is bound to a has_attribute local variable in mkiv) +local finalizers = xml.finalizers.xml +local xmlfilter = xml.filter -- we could inline this one for speed +local xmltostring = xml.tostring +local xmlserialize = xml.serialize -debugger = debugger or { } +local function first(collected) -- wrong ? + return collected and collected[1] +end -local counters = { } -local names = { } -local getinfo = debug.getinfo -local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch +local function last(collected) + return collected and collected[#collected] +end --- one +local function all(collected) + return collected +end -local function hook() - local f = getinfo(2,"f").func - local n = getinfo(2,"Sn") --- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end - if f then - local cf = counters[f] - if cf == nil then - counters[f] = 1 - names[f] = n - else - counters[f] = cf + 1 +local function reverse(collected) + if collected then + local reversed = { } + for c=#collected,1,-1 do + reversed[#reversed+1] = collected[c] end + return reversed end end -local function getname(func) - local n = names[func] - if n then - if n.what == "C" then - return n.name or '' - else - -- source short_src linedefined what name namewhat nups func - local name = n.name or n.namewhat or n.what - if not name or name == "" then name = "?" 
end - return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name) - end - else - return "unknown" + +local function attribute(collected,name) + if collected and #collected > 0 then + local at = collected[1].at + return at and at[name] end end -function debugger.showstats(printer,threshold) - printer = printer or texio.write or print - threshold = threshold or 0 - local total, grandtotal, functions = 0, 0, 0 - printer("\n") -- ugly but ok - -- table.sort(counters) - for func, count in pairs(counters) do - if count > threshold then - local name = getname(func) - if not name:find("for generator") then - printer(format("%8i %s", count, name)) - total = total + count - end - end - grandtotal = grandtotal + count - functions = functions + 1 - end - printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) + +local function att(id,name) + local at = id.at + return at and at[name] end --- two +local function count(collected) + return (collected and #collected) or 0 +end ---~ local function hook() ---~ local n = getinfo(2) ---~ if n.what=="C" and not n.name then ---~ local f = tostring(debug.traceback()) ---~ local cf = counters[f] ---~ if cf == nil then ---~ counters[f] = 1 ---~ names[f] = n ---~ else ---~ counters[f] = cf + 1 ---~ end ---~ end ---~ end ---~ function debugger.showstats(printer,threshold) ---~ printer = printer or texio.write or print ---~ threshold = threshold or 0 ---~ local total, grandtotal, functions = 0, 0, 0 ---~ printer("\n") -- ugly but ok ---~ -- table.sort(counters) ---~ for func, count in pairs(counters) do ---~ if count > threshold then ---~ printer(format("%8i %s", count, func)) ---~ total = total + count ---~ end ---~ grandtotal = grandtotal + count ---~ functions = functions + 1 ---~ end ---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) ---~ end +local function position(collected,n) + if collected then + n = tonumber(n) or 0 + if n < 0 then + return collected[#collected + n + 1] + elseif n > 0 then + return collected[n] + else + return collected[1].mi or 0 + end + end +end --- rest +local function match(collected) + return (collected and collected[1].mi) or 0 -- match +end -function debugger.savestats(filename,threshold) - local f = io.open(filename,'w') - if f then - debugger.showstats(function(str) f:write(str) end,threshold) - f:close() +local function index(collected) + if collected then + return collected[1].ni end end -function debugger.enable() - debug.sethook(hook,"c") +local function attributes(collected,arguments) + if collected then + local at = collected[1].at + if arguments then + return at[arguments] + elseif next(at) then + return at -- all of them + end + end end -function debugger.disable() - debug.sethook() ---~ counters[debug.getinfo(2,"f").func] = nil +local function chainattribute(collected,arguments) -- todo: optional levels + if collected then + local e = collected[1] + while e do + local at = e.at + if at then + local a = at[arguments] + if a then + return a + end + else + break -- error + end + e = e.__p__ + end + end + return "" end -function debugger.tracing() - local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0 - if n > 0 then - function debugger.tracing() return true end ; return true +local function raw(collected) -- hybrid + if collected then + local e = collected[1] or collected + return (e and xmlserialize(e)) or "" -- only first 
as we cannot concat function else - function debugger.tracing() return false end ; return false + return "" end end ---~ debugger.enable() - ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) - ---~ debugger.disable() - ---~ print("") ---~ debugger.showstats() ---~ print("") ---~ debugger.showstats(print,3) - -trackers = trackers or { } +local function text(collected) -- hybrid + if collected then + local e = collected[1] or collected + return (e and xmltostring(e.dt)) or "" + else + return "" + end +end -local data, done = { }, { } +local function texts(collected) + if collected then + local t = { } + for c=1,#collected do + local e = collection[c] + if e and e.dt then + t[#t+1] = e.dt + end + end + return t + end +end -local function set(what,value) - if type(what) == "string" then - what = aux.settings_to_array(what) +local function tag(collected,n) + if collected then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[#collected-n+1] + end + return c and c.tg end - for i=1,#what do - local w = what[i] - for d, f in next, data do - if done[d] then - -- prevent recursion due to wildcards - elseif find(d,w) then - done[d] = true - for i=1,#f do - f[i](value) - end +end + +local function name(collected,n) + if collected then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[#collected-n+1] + end + if c then + if c.ns == "" then + return c.tg + else + return c.ns .. ":" .. c.tg end end end end -local function reset() - for d, f in next, data do - for i=1,#f do - f[i](false) +local function tags(collected,nonamespace) + if collected then + local t = { } + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace or ns == "" then + t[#t+1] = tg + else + t[#t+1] = ns .. ":" .. tg + end end + return t end end -function trackers.register(what,...) - what = lower(what) - local w = data[what] - if not w then - w = { } - data[what] = w - end - for _, fnc in next, { ... 
} do - local typ = type(fnc) - if typ == "function" then - w[#w+1] = fnc - elseif typ == "string" then - w[#w+1] = function(value) set(fnc,value,nesting) end +local function empty(collected) + if collected then + for c=1,#collected do + local e = collected[c] + if e then + local edt = e.dt + if edt then + local n = #edt + if n == 1 then + local edk = edt[1] + local typ = type(edk) + if typ == "table" then + return false + elseif edk ~= "" then -- maybe an extra tester for spacing only + return false + end + elseif n > 1 then + return false + end + end + end end end + return true end -function trackers.enable(what) - done = { } - set(what,true) +finalizers.first = first +finalizers.last = last +finalizers.all = all +finalizers.reverse = reverse +finalizers.elements = all +finalizers.default = all +finalizers.attribute = attribute +finalizers.att = att +finalizers.count = count +finalizers.position = position +finalizers.match = match +finalizers.index = index +finalizers.attributes = attributes +finalizers.chainattribute = chainattribute +finalizers.text = text +finalizers.texts = texts +finalizers.tag = tag +finalizers.name = name +finalizers.tags = tags +finalizers.empty = empty + +-- shortcuts -- we could support xmlfilter(id,pattern,first) + +function xml.first(id,pattern) + return first(xmlfilter(id,pattern)) end -function trackers.disable(what) - done = { } - if not what or what == "" then - trackers.reset(what) +function xml.last(id,pattern) + return last(xmlfilter(id,pattern)) +end + +function xml.count(id,pattern) + return count(xmlfilter(id,pattern)) +end + +function xml.attribute(id,pattern,a,default) + return attribute(xmlfilter(id,pattern),a,default) +end + +function xml.raw(id,pattern) + if pattern then + return raw(xmlfilter(id,pattern)) else - set(what,false) + return raw(id) end end -function trackers.reset(what) - done = { } - reset() +function xml.text(id,pattern) + if pattern then + -- return text(xmlfilter(id,pattern)) + local collected = xmlfilter(id,pattern) + return (collected and xmltostring(collected[1].dt)) or "" + elseif id then + -- return text(id) + return xmltostring(id.dt) or "" + else + return "" + end end -function trackers.list() -- pattern - local list = table.sortedkeys(data) - local user, system = { }, { } - for l=1,#list do - local what = list[l] - if find(what,"^%*") then - system[#system+1] = what - else - user[#user+1] = what - end - end - return user, system +xml.content = text + +function xml.position(id,pattern,n) -- element + return position(xmlfilter(id,pattern),n) +end + +function xml.match(id,pattern) -- number + return match(xmlfilter(id,pattern)) +end + +function xml.empty(id,pattern) + return empty(xmlfilter(id,pattern)) end +xml.all = xml.filter +xml.index = xml.position +xml.found = xml.filter + end -- of closure @@ -5667,7 +7287,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['luat-env'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -5679,10 +7299,10 @@ if not modules then modules = { } end modules ['luat-env'] = { -- evolved before bytecode arrays were available and so a lot of -- code has disappeared already. 
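
A minimal usage sketch of the element helpers and finalizer shortcuts added in the lxml hunks above, assuming a tree parsed with xml.convert and plain tag-name lpath patterns; the sample document and the behaviour noted in the comments are illustrative, not taken from the patch:

local root = xml.convert("<doc><item n='1'>alpha</item><item n='2'>beta</item></doc>")
print(xml.count(root,"item"))           -- number of matched elements
print(xml.text(root,"item"))            -- content of the first match
print(xml.attribute(root,"item","n"))   -- attribute value of the first match
xml.delete(root,"item")                 -- synonym for xml.delete_element
print(xml.tostring(root))               -- serialized remainder of the tree
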
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local format = string.format +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquote, quote = string.unquote, string.quote -- precautions @@ -5716,13 +7336,14 @@ if not environment.jobname then environ function environment.initialize_arguments(arg) local arguments, files = { }, { } environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index, argument in pairs(arg) do + for index=1,#arg do + local argument = arg[index] if index > 0 then - local flag, value = argument:match("^%-+(.+)=(.-)$") + local flag, value = match(argument,"^%-+(.-)=(.-)$") if flag then - arguments[flag] = string.unquote(value or "") + arguments[flag] = unquote(value or "") else - flag = argument:match("^%-+(.+)") + flag = match(argument,"^%-+(.+)") if flag then arguments[flag] = true else @@ -5749,25 +7370,30 @@ function environment.argument(name,partial) return arguments[name] elseif partial then if not sortedflags then - sortedflags = { } - for _,v in pairs(table.sortedkeys(arguments)) do - sortedflags[#sortedflags+1] = "^" .. v + sortedflags = table.sortedkeys(arguments) + for k=1,#sortedflags do + sortedflags[k] = "^" .. sortedflags[k] end environment.sortedflags = sortedflags end -- example of potential clash: ^mode ^modefile - for _,v in ipairs(sortedflags) do - if name:find(v) then - return arguments[v:sub(2,#v)] + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] end end end return nil end +environment.argument("x",true) + function environment.split_arguments(separator) -- rather special, cut-off before separator local done, before, after = false, { }, { } - for _,v in ipairs(environment.original_arguments) do + local original_arguments = environment.original_arguments + for k=1,#original_arguments do + local v = original_arguments[k] if not done and v == separator then done = true elseif done then @@ -5784,16 +7410,17 @@ function environment.reconstruct_commandline(arg,noquote) if noquote and #arg == 1 then local a = arg[1] a = resolvers.resolve(a) - a = a:unquote() + a = unquote(a) return a - elseif next(arg) then + elseif #arg > 0 then local result = { } - for _,a in ipairs(arg) do -- ipairs 1 .. 
#n + for i=1,#arg do + local a = arg[i] a = resolvers.resolve(a) - a = a:unquote() - a = a:gsub('"','\\"') -- tricky - if a:find(" ") then - result[#result+1] = a:quote() + a = unquote(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quote(a) else result[#result+1] = a end @@ -5806,17 +7433,18 @@ end if arg then - -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later) + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) local newarg, instring = { }, false - for index, argument in ipairs(arg) do - if argument:find("^\"") then - newarg[#newarg+1] = argument:gsub("^\"","") - if not argument:find("\"$") then + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then instring = true end - elseif argument:find("\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","") + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") instring = false elseif instring then newarg[#newarg] = newarg[#newarg] .. " " .. argument @@ -5871,12 +7499,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in filename = file.replacesuffix(filename, "lua") local fullname = environment.luafile(filename) if fullname and fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading file %s", fullname) end return environment.loadedluacode(fullname) else - if trace_verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end return nil @@ -5896,7 +7524,7 @@ function environment.loadluafile(filename, version) -- when not overloaded by explicit suffix we look for a luc file first local fullname = (lucname and environment.luafile(lucname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check @@ -5914,7 +7542,7 @@ function environment.loadluafile(filename, version) if v == version then return true else - if trace_verbose then + if trace_locating then logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version) end environment.loadluafile(filename) @@ -5925,12 +7553,12 @@ function environment.loadluafile(filename, version) end fullname = (luaname and environment.luafile(luaname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check if not chunk then - if verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end else @@ -5948,7 +7576,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-inf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-inf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -5973,6 +7601,14 @@ function statistics.hastimer(instance) return instance and instance.starttime end +function statistics.resettiming(instance) + if not instance then + notimer = { timing = 0, loadtime = 0 } + else + instance.timing, instance.loadtime = 0, 0 + end +end + function statistics.starttiming(instance) if 
not instance then notimer = { } @@ -5987,6 +7623,8 @@ function statistics.starttiming(instance) if not instance.loadtime then instance.loadtime = 0 end + else +--~ logs.report("system","nested timing (%s)",tostring(instance)) end instance.timing = it + 1 end @@ -6032,6 +7670,12 @@ function statistics.elapsedindeed(instance) return t > statistics.threshold end +function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds + if statistics.elapsedindeed(instance) then + return format("%s seconds %s", statistics.elapsedtime(instance),rest or "") + end +end + -- general function function statistics.register(tag,fnc) @@ -6110,14 +7754,32 @@ function statistics.timed(action,report) report("total runtime: %s",statistics.elapsedtime(timer)) end +-- where, not really the best spot for this: + +commands = commands or { } + +local timer + +function commands.resettimer() + statistics.resettiming(timer) + statistics.starttiming(timer) +end + +function commands.elapsedtime() + statistics.stoptiming(timer) + tex.sprint(statistics.elapsedtime(timer)) +end + +commands.resettimer() + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['luat-log'] = { +if not modules then modules = { } end modules ['trac-log'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-log.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6125,7 +7787,11 @@ if not modules then modules = { } end modules ['luat-log'] = { -- this is old code that needs an overhaul -local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format +--~ io.stdout:setvbuf("no") +--~ io.stderr:setvbuf("no") + +local write_nl, write = texio.write_nl or print, texio.write or io.write +local format, gmatch = string.format, string.gmatch local texcount = tex and tex.count if texlua then @@ -6206,25 +7872,48 @@ function logs.tex.line(fmt,...) 
-- new end end +--~ function logs.tex.start_page_number() +--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +--~ if real > 0 then +--~ if user > 0 then +--~ if sub > 0 then +--~ write(format("[%s.%s.%s",real,user,sub)) +--~ else +--~ write(format("[%s.%s",real,user)) +--~ end +--~ else +--~ write(format("[%s",real)) +--~ end +--~ else +--~ write("[-") +--~ end +--~ end + +--~ function logs.tex.stop_page_number() +--~ write("]") +--~ end + +local real, user, sub + function logs.tex.start_page_number() - local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +end + +function logs.tex.stop_page_number() if real > 0 then if user > 0 then if sub > 0 then - write(format("[%s.%s.%s",real,user,sub)) + logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub) else - write(format("[%s.%s",real,user)) + logs.report("pages", "flushing realpage %s, userpage %s",real,user) end else - write(format("[%s",real)) + logs.report("pages", "flushing realpage %s",real) end else - write("[-") + logs.report("pages", "flushing page") end -end - -function logs.tex.stop_page_number() - write("]") + io.flush() end logs.tex.report_job_stat = statistics.show_job_stat @@ -6324,7 +8013,7 @@ end function logs.setprogram(_name_,_banner_,_verbose_) name, banner = _name_, _banner_ if _verbose_ then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") end logs.set_method("tex") logs.report = report -- also used in libraries @@ -6337,9 +8026,9 @@ end function logs.setverbose(what) if what then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") else - trackers.disable("resolvers.verbose") + trackers.disable("resolvers.locating") end logs.verbose = what or false end @@ -6356,7 +8045,7 @@ logs.report = logs.tex.report logs.simple = logs.tex.report function logs.reportlines(str) -- todo: - for line in str:gmatch("(.-)[\n\r]") do + for line in gmatch(str,"(.-)[\n\r]") do logs.report(line) end end @@ -6367,8 +8056,12 @@ end logs.simpleline = logs.reportline -function logs.help(message,option) +function logs.reportbanner() -- for scripts too logs.report(banner) +end + +function logs.help(message,option) + logs.reportbanner() logs.reportline() logs.reportlines(message) local moreinfo = logs.moreinfo or "" @@ -6400,6 +8093,11 @@ end --~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --~ end +function logs.fatal(where,...) 
+ logs.report(where,"fatal error: %s, aborting now",format(...)) + os.exit() +end + end -- of closure @@ -6407,10 +8105,10 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", - comment = "companion to luat-lib.tex", } -- After a few years using the code the large luat-inp.lua file @@ -6422,7 +8120,7 @@ if not modules then modules = { } end modules ['data-inp'] = { -- * some public auxiliary functions were made private -- -- TODO: os.getenv -> os.env[] --- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller) +-- TODO: instances.[hashes,cnffiles,configurations,522] -- TODO: check escaping in find etc, too much, too slow -- This lib is multi-purpose and can be loaded again later on so that @@ -6443,12 +8141,13 @@ if not modules then modules = { } end modules ['data-inp'] = { local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type = next, type +local lpegmatch = lpeg.match -local trace_locating, trace_detail, trace_verbose = false, false, false +local trace_locating, trace_detail, trace_expansions = false, false, false -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) -trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end) +trackers.register("resolvers.locating", function(v) trace_locating = v end) +trackers.register("resolvers.details", function(v) trace_detail = v end) +trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo if not resolvers then resolvers = { @@ -6472,7 +8171,7 @@ resolvers.generators.notfound = { nil } resolvers.cacheversion = '1.0.1' resolvers.cnfname = 'texmf.cnf' resolvers.luaname = 'texmfcnf.lua' -resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' +resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}' local dummy_path_expr = "^!*unset/*$" @@ -6514,8 +8213,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' } alternatives['map files'] = 'map' alternatives['enc files'] = 'enc' -alternatives['cid files'] = 'cid' -alternatives['fea files'] = 'fea' +alternatives['cid maps'] = 'cid' -- great, why no cid files +alternatives['font feature files'] = 'fea' -- and fea files here alternatives['opentype fonts'] = 'otf' alternatives['truetype fonts'] = 'ttf' alternatives['truetype collections'] = 'ttc' @@ -6531,6 +8230,11 @@ formats ['sfd'] = 'SFDFONTS' suffixes ['sfd'] = { 'sfd' } alternatives['subfont definition files'] = 'sfd' +-- lib paths + +formats ['lib'] = 'CLUAINPUTS' -- new (needs checking) +suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' } + -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. 
We used to pass the @@ -6653,8 +8357,10 @@ local function check_configuration() -- not yet ok, no time for debugging now -- bad luck end fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm - fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") - fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- this will go away some day + fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//") end @@ -6669,7 +8375,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' end end -resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE")) +resolvers.settrace(os.getenv("MTX_INPUT_TRACE")) function resolvers.osenv(key) local ie = instance.environment @@ -6757,37 +8463,43 @@ end -- work that well; the parsing is ok, but dealing with the resulting -- table is a pain because we need to work inside-out recursively +local function do_first(a,b) + local t = { } + for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end + return "{" .. concat(t,",") .. "}" +end + +local function do_second(a,b) + local t = { } + for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end + return "{" .. concat(t,",") .. "}" +end + +local function do_both(a,b) + local t = { } + for sa in gmatch(a,"[^,]+") do + for sb in gmatch(b,"[^,]+") do + t[#t+1] = sa .. sb + end + end + return "{" .. concat(t,",") .. "}" +end + +local function do_three(a,b,c) + return a .. b.. c +end + local function splitpathexpr(str, t, validate) -- no need for further optimization as it is only called a - -- few times, we can use lpeg for the sub; we could move - -- the local functions outside the body + -- few times, we can use lpeg for the sub + if trace_expansions then + logs.report("fileio","expanding variable '%s'",str) + end t = t or { } str = gsub(str,",}",",@}") str = gsub(str,"{,","{@,") -- str = "@" .. str .. "@" local ok, done - local function do_first(a,b) - local t = { } - for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end - return "{" .. concat(t,",") .. "}" - end - local function do_second(a,b) - local t = { } - for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end - return "{" .. concat(t,",") .. "}" - end - local function do_both(a,b) - local t = { } - for sa in gmatch(a,"[^,]+") do - for sb in gmatch(b,"[^,]+") do - t[#t+1] = sa .. sb - end - end - return "{" .. concat(t,",") .. "}" - end - local function do_three(a,b,c) - return a .. b.. c - end while true do done = false while true do @@ -6818,6 +8530,11 @@ local function splitpathexpr(str, t, validate) t[#t+1] = s end end + if trace_expansions then + for k=1,#t do + logs.report("fileio","% 4i: %s",k,t[k]) + end + end return t end @@ -6857,18 +8574,27 @@ end -- also we now follow the stupid route: if not set then just assume *one* -- cnf file under texmf (i.e. 
distribution) -resolvers.ownpath = resolvers.ownpath or nil -resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex" -resolvers.autoselfdir = true -- false may be handy for debugging +local args = environment and environment.original_arguments or arg -- this needs a cleanup + +resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" +resolvers.ownbin = gsub(resolvers.ownbin,"\\","/") function resolvers.getownpath() - if not resolvers.ownpath then - if resolvers.autoselfdir and os.selfdir then - resolvers.ownpath = os.selfdir - else - local binary = resolvers.ownbin - if os.platform == "windows" then - binary = file.replacesuffix(binary,"exe") + local ownpath = resolvers.ownpath or os.selfdir + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + end + local binary = resolvers.ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and file.dirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) end for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do local b = file.join(p,binary) @@ -6880,30 +8606,39 @@ function resolvers.getownpath() local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() - if trace_verbose and p ~= pp then - logs.report("fileio","following symlink %s to %s",p,pp) + if trace_locating and p ~= pp then + logs.report("fileio","following symlink '%s' to '%s'",p,pp) end - resolvers.ownpath = pp + ownpath = pp lfs.chdir(olddir) else - if trace_verbose then - logs.report("fileio","unable to check path %s",p) + if trace_locating then + logs.report("fileio","unable to check path '%s'",p) end - resolvers.ownpath = p + ownpath = p end break end end end - if not resolvers.ownpath then resolvers.ownpath = '.' end + if not ownpath or ownpath == "" then + ownpath = "." 
+ logs.report("fileio","forcing fallback ownpath .") + elseif trace_locating then + logs.report("fileio","using ownpath '%s'",ownpath) + end end - return resolvers.ownpath + resolvers.ownpath = ownpath + function resolvers.getownpath() + return resolvers.ownpath + end + return ownpath end local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" } local function identify_own() - local ownpath = resolvers.getownpath() or lfs.currentdir() + local ownpath = resolvers.getownpath() or dir.current() local ie = instance.environment if ownpath then if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end @@ -6916,10 +8651,10 @@ local function identify_own() if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end - if trace_verbose then + if trace_locating then for i=1,#own_places do local v = own_places[i] - logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown") + logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown") end end identify_own = function() end @@ -6951,10 +8686,8 @@ end local function load_cnf_file(fname) fname = resolvers.clean_path(fname) local lname = file.replacesuffix(fname,'lua') - local f = io.open(lname) - if f then -- this will go - f:close() - local dname = file.dirname(fname) + if lfs.isfile(lname) then + local dname = file.dirname(fname) -- fname ? if not instance.configuration[dname] then resolvers.load_data(dname,'configuration',lname and file.basename(lname)) instance.order[#instance.order+1] = instance.configuration[dname] @@ -6962,8 +8695,8 @@ local function load_cnf_file(fname) else f = io.open(fname) if f then - if trace_verbose then - logs.report("fileio","loading %s", fname) + if trace_locating then + logs.report("fileio","loading configuration file %s", fname) end local line, data, n, k, v local dname = file.dirname(fname) @@ -6997,14 +8730,16 @@ local function load_cnf_file(fname) end end f:close() - elseif trace_verbose then - logs.report("fileio","skipping %s", fname) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'", fname) end end end local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared) - for _,c in ipairs(instance.order) do + local order = instance.order + for i=1,#order do + local c = order[i] for k,v in next, c do if not instance.variables[k] then if instance.environment[k] then @@ -7020,19 +8755,24 @@ end function resolvers.load_cnf() local function loadoldconfigdata() - for _, fname in ipairs(instance.cnffiles) do - load_cnf_file(fname) + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + load_cnf_file(cnffiles[i]) end end -- instance.cnffiles contain complete names now ! 
+ -- we still use a funny mix of cnf and new but soon + -- we will switch to lua exclusively as we only use + -- the file to collect the tree roots if #instance.cnffiles == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)") end else - instance.rootpath = instance.cnffiles[1] - for k,fname in ipairs(instance.cnffiles) do - instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local cnffiles = instance.cnffiles + instance.rootpath = cnffiles[1] + for k=1,#cnffiles do + instance.cnffiles[k] = file.collapse_path(cnffiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -7060,8 +8800,9 @@ function resolvers.load_lua() -- yet harmless else instance.rootpath = instance.luafiles[1] - for k,fname in ipairs(instance.luafiles) do - instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local luafiles = instance.luafiles + for k=1,#luafiles do + instance.luafiles[k] = file.collapse_path(luafiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -7093,14 +8834,14 @@ end function resolvers.append_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash append: %s",tag) + logs.report("fileio","hash '%s' appended",tag) end insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } ) end function resolvers.prepend_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash prepend: %s",tag) + logs.report("fileio","hash '%s' prepended",tag) end insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } ) end @@ -7124,9 +8865,11 @@ end -- locators function resolvers.locatelists() - for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do - if trace_verbose then - logs.report("fileio","locating list of %s",path) + local texmfpaths = resolvers.clean_path_list('TEXMF') + for i=1,#texmfpaths do + local path = texmfpaths[i] + if trace_locating then + logs.report("fileio","locating list of '%s'",path) end resolvers.locatedatabase(file.collapse_path(path)) end @@ -7139,11 +8882,11 @@ end function resolvers.locators.tex(specification) if specification and specification ~= '' and lfs.isdir(specification) then if trace_locating then - logs.report("fileio",'! tex locator found: %s',specification) + logs.report("fileio","tex locator '%s' found",specification) end resolvers.append_hash('file',specification,filename) elseif trace_locating then - logs.report("fileio",'? 
tex locator not found: %s',specification) + logs.report("fileio","tex locator '%s' not found",specification) end end @@ -7157,7 +8900,9 @@ function resolvers.loadfiles() instance.loaderror = false instance.files = { } if not instance.renewcache then - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for k=1,#hashes do + local hash = hashes[k] resolvers.hashdatabase(hash.tag,hash.name) if instance.loaderror then break end end @@ -7171,8 +8916,9 @@ end -- generators: function resolvers.loadlists() - for _, hash in ipairs(instance.hashes) do - resolvers.generatedatabase(hash.tag) + local hashes = instance.hashes + for i=1,#hashes do + resolvers.generatedatabase(hashes[i].tag) end end @@ -7184,10 +8930,27 @@ end local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = lpeg.P(" ") +--~ local l_character = lpeg.patterns.utf8 +--~ local l_dangerous = lpeg.P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1) +--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false) + +--~ local function test(str) +--~ print(str,lpeg.match(l_normal,str)) +--~ end +--~ test("ヒラギノ明朝 Pro W3") +--~ test("..ヒラギノ明朝 Pro W3") +--~ test(":ヒラギノ明朝 Pro W3;") +--~ test("ヒラギノ明朝 /Pro W3;") +--~ test("ヒラギノ明朝 Pro W3") + function resolvers.generators.tex(specification) local tag = specification - if trace_verbose then - logs.report("fileio","scanning path %s",specification) + if trace_locating then + logs.report("fileio","scanning path '%s'",specification) end instance.files[tag] = { } local files = instance.files[tag] @@ -7203,7 +8966,8 @@ function resolvers.generators.tex(specification) full = spec end for name in directory(full) do - if not weird:match(name) then + if not lpegmatch(weird,name) then + -- if lpegmatch(l_normal,name) then local mode = attributes(full..name,'mode') if mode == 'file' then if path then @@ -7236,7 +9000,7 @@ function resolvers.generators.tex(specification) end end action() - if trace_verbose then + if trace_locating then logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r) end end @@ -7251,11 +9015,48 @@ end -- we join them and split them after the expansion has taken place. This -- is more convenient. 
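
The 'weird' lpeg pattern above decides which directory entries the generator skips while scanning; a short sketch of its effect, assuming the local pattern is in scope (the results noted in the comments are illustrative):

print(lpeg.match(weird,".svn"))          -- matches: names starting with dots are skipped
print(lpeg.match(weird,"odd name?"))     -- matches: names with forbidden characters are skipped
print(lpeg.match(weird,"cont-en.mkiv"))  -- nil: regular names get registered
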
+--~ local checkedsplit = string.checkedsplit + +local cache = { } + +local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;"))) + +local function split_kpse_path(str) -- beware, this can be either a path or a {specification} + local found = cache[str] + if not found then + if str == "" then + found = { } + else + str = gsub(str,"\\","/") +--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator) +local split = lpegmatch(splitter,str) + found = { } + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + found[#found+1] = s + end + end + if trace_expansions then + logs.report("fileio","splitting path specification '%s'",str) + for k=1,#found do + logs.report("fileio","% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found +end + +resolvers.split_kpse_path = split_kpse_path + function resolvers.splitconfig() - for i,c in ipairs(instance) do - for k,v in pairs(c) do + for i=1,#instance do + local c = instance[i] + for k,v in next, c do if type(v) == 'string' then - local t = file.split_path(v) + local t = split_kpse_path(v) if #t > 1 then c[k] = t end @@ -7265,21 +9066,25 @@ function resolvers.splitconfig() end function resolvers.joinconfig() - for i,c in ipairs(instance.order) do - for k,v in pairs(c) do -- ipairs? + local order = instance.order + for i=1,#order do + local c = order[i] + for k,v in next, c do -- indexed? if type(v) == 'table' then c[k] = file.join_path(v) end end end end + function resolvers.split_path(str) if type(str) == 'table' then return str else - return file.split_path(str) + return split_kpse_path(str) end end + function resolvers.join_path(str) if type(str) == 'table' then return file.join_path(str) @@ -7291,8 +9096,9 @@ end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h = { }, { } - for _,vv in ipairs(file.split_path(v)) do + local t, h, p = { }, { }, split_kpse_path(v) + for kk=1,#p do + local vv = p[kk] if vv ~= "" and not h[vv] then t[#t+1] = vv h[vv] = true @@ -7339,11 +9145,15 @@ function resolvers.serialize(files) end t[#t+1] = "return {" if instance.sortdata then - for _, k in pairs(sortedkeys(files)) do -- ipairs + local sortedfiles = sortedkeys(files) + for i=1,#sortedfiles do + local k = sortedfiles[i] local fk = files[k] if type(fk) == 'table' then t[#t+1] = "\t['" .. k .. "']={" - for _, kk in pairs(sortedkeys(fk)) do -- ipairs + local sortedfk = sortedkeys(fk) + for j=1,#sortedfk do + local kk = sortedfk[j] t[#t+1] = dump(kk,fk[kk],"\t\t") end t[#t+1] = "\t}," @@ -7368,12 +9178,18 @@ function resolvers.serialize(files) return concat(t,"\n") end +local data_state = { } + +function resolvers.data_state() + return data_state or { } +end + function resolvers.save_data(dataname, makename) -- untested without cache overload for cachename, files in next, instance[dataname] do local name = (makename or file.join)(cachename,dataname) local luaname, lucname = name .. ".lua", name .. 
".luc" - if trace_verbose then - logs.report("fileio","preparing %s for %s",dataname,cachename) + if trace_locating then + logs.report("fileio","preparing '%s' for '%s'",dataname,cachename) end for k, v in next, files do if type(v) == "table" and #v == 1 then @@ -7387,24 +9203,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = files, + uuid = os.uuid(), } local ok = io.savedata(luaname,resolvers.serialize(data)) if ok then - if trace_verbose then - logs.report("fileio","%s saved in %s",dataname,luaname) + if trace_locating then + logs.report("fileio","'%s' saved in '%s'",dataname,luaname) end if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip - if trace_verbose then - logs.report("fileio","%s compiled to %s",dataname,lucname) + if trace_locating then + logs.report("fileio","'%s' compiled to '%s'",dataname,lucname) end else - if trace_verbose then - logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname) + if trace_locating then + logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end - elseif trace_verbose then - logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname) + elseif trace_locating then + logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname) end end end @@ -7416,19 +9233,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi if blob then local data = blob() if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then - if trace_verbose then - logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename) + data_state[#data_state+1] = data.uuid + if trace_locating then + logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = data.content else - if trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + if trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + elseif trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end end @@ -7447,15 +9265,17 @@ function resolvers.resetconfig() end function resolvers.loadnewconfig() - for _, cnf in ipairs(instance.luafiles) do + local luafiles = instance.luafiles + for i=1,#luafiles do + local cnf = luafiles[i] local pathname = file.dirname(cnf) local filename = file.join(pathname,resolvers.luaname) local blob = loadfile(filename) if blob then local data = blob() if data then - if trace_verbose then - logs.report("fileio","loading configuration file %s",filename) + if trace_locating then + logs.report("fileio","loading configuration file '%s'",filename) end if true then -- flatten to variable.progname @@ -7476,14 +9296,14 @@ function resolvers.loadnewconfig() instance['setup'][pathname] = data end else - if trace_verbose then - logs.report("fileio","skipping configuration file %s",filename) + if trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance['setup'][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping 
configuration file %s",filename) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance.order[#instance.order+1] = instance.setup[pathname] if instance.loaderror then break end @@ -7492,7 +9312,9 @@ end function resolvers.loadoldconfig() if not instance.renewcache then - for _, cnf in ipairs(instance.cnffiles) do + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + local cnf = cnffiles[i] local dname = file.dirname(cnf) resolvers.load_data(dname,'configuration') instance.order[#instance.order+1] = instance.configuration[dname] @@ -7682,7 +9504,7 @@ end function resolvers.expanded_path_list(str) if not str then - return ep or { } + return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") @@ -7700,9 +9522,9 @@ end function resolvers.expanded_path_list_from_var(str) -- brrr local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(str) - else return resolvers.expanded_path_list(tmp) + else + return resolvers.expanded_path_list(str) end end @@ -7749,9 +9571,9 @@ function resolvers.isreadable.file(name) local readable = lfs.isfile(name) -- brrr if trace_detail then if readable then - logs.report("fileio","+ readable: %s",name) + logs.report("fileio","file '%s' is readable",name) else - logs.report("fileio","- readable: %s", name) + logs.report("fileio","file '%s' is not readable", name) end end return readable @@ -7767,7 +9589,7 @@ local function collect_files(names) for k=1,#names do local fname = names[k] if trace_detail then - logs.report("fileio","? blobpath asked: %s",fname) + logs.report("fileio","checking name '%s'",fname) end local bname = file.basename(fname) local dname = file.dirname(fname) @@ -7783,7 +9605,7 @@ local function collect_files(names) local files = blobpath and instance.files[blobpath] if files then if trace_detail then - logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname) + logs.report("fileio","deep checking '%s' (%s)",blobpath,bname) end local blobfile = files[bname] if not blobfile then @@ -7817,7 +9639,7 @@ local function collect_files(names) end end elseif trace_locating then - logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname) + logs.report("fileio","no match in '%s' (%s)",blobpath,bname) end end end @@ -7867,14 +9689,13 @@ end local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc) local result = collected or { } local stamp = nil - filename = file.collapse_path(filename) -- elsewhere - filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere + filename = file.collapse_path(filename) -- speed up / beware: format problem if instance.remember then stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format if instance.found[stamp] then if trace_locating then - logs.report("fileio",'! 
remembered: %s',filename) + logs.report("fileio","remembering file '%s'",filename) end return instance.found[stamp] end @@ -7882,7 +9703,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not dangerous[instance.format or "?"] then if resolvers.isreadable.file(filename) then if trace_detail then - logs.report("fileio",'= found directly: %s',filename) + logs.report("fileio","file '%s' found directly",filename) end instance.found[stamp] = { filename } return { filename } @@ -7890,13 +9711,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end if find(filename,'%*') then if trace_locating then - logs.report("fileio",'! wildcard: %s', filename) + logs.report("fileio","checking wildcard '%s'", filename) end result = resolvers.find_wildcard_files(filename) elseif file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then - logs.report("fileio",'! qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end result = { filename } else @@ -7906,7 +9727,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. ".tex" if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing standard filetype: tex') + logs.report("fileio","no suffix, forcing standard filetype 'tex'") end result, ok = { forcedname }, true end @@ -7916,7 +9737,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. "." .. s if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing format filetype: %s', s) + logs.report("fileio","no suffix, forcing format filetype '%s'", s) end result, ok = { forcedname }, true break @@ -7928,7 +9749,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- try to find in tree (no suffix manipulation), here we search for the -- matching last part of the name local basename = file.basename(filename) - local pattern = (filename .. "$"):gsub("([%.%-])","%%%1") + local pattern = gsub(filename .. 
"$","([%.%-])","%%%1") local savedformat = instance.format local format = savedformat or "" if format == "" then @@ -7938,19 +9759,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan instance.format = "othertextfiles" -- kind of everything, maybe texinput is better end -- - local resolved = collect_instance_files(basename) - if #result == 0 then - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered) + if basename ~= filename then + local resolved = collect_instance_files(basename) + if #result == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered) + end end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if rr:find(pattern) then - result[#result+1], ok = rr, true + resolvers.format = savedformat + -- + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1], ok = rr, true + end end end -- a real wildcard: @@ -7959,14 +9782,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- local filelist = collect_files({basename}) -- for f=1,#filelist do -- local ff = filelist[f][3] or "" - -- if ff:find(pattern) then + -- if find(ff,pattern) then -- result[#result+1], ok = ff, true -- end -- end -- end end if not ok and trace_locating then - logs.report("fileio",'? qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end end else @@ -7985,12 +9808,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan wantedfiles[#wantedfiles+1] = forcedname filetype = resolvers.format_of_suffix(forcedname) if trace_locating then - logs.report("fileio",'! forcing filetype: %s',filetype) + logs.report("fileio","forcing filetype '%s'",filetype) end else filetype = resolvers.format_of_suffix(filename) if trace_locating then - logs.report("fileio",'! using suffix based filetype: %s',filetype) + logs.report("fileio","using suffix based filetype '%s'",filetype) end end else @@ -8002,7 +9825,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end filetype = instance.format if trace_locating then - logs.report("fileio",'! using given filetype: %s',filetype) + logs.report("fileio","using given filetype '%s'",filetype) end end local typespec = resolvers.variable_of_format(filetype) @@ -8010,9 +9833,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then - logs.report("fileio",'? filename: %s',filename) - logs.report("fileio",'? filetype: %s',filetype or '?') - logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | ")) + logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end for k=1,#wantedfiles do local fname = wantedfiles[k] @@ -8033,36 +9854,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan else -- list search local filelist = collect_files(wantedfiles) - local doscan, recurse + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = file.dirname(filelist[i][2]) .. "/" + end + end if trace_detail then - logs.report("fileio",'? filename: %s',filename) + logs.report("fileio","checking filename '%s'",filename) end -- a bit messy ... 
esp the doscan setting here + local doscan for k=1,#pathlist do local path = pathlist[k] if find(path,"^!!") then doscan = false else doscan = true end - if find(path,"//$") then recurse = true else recurse = false end local pathname = gsub(path,"^!+", '') done = false -- using file list - if filelist and not (done and not instance.allresults) and recurse then - -- compare list entries with permitted pattern - pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences - pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname - pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless - local expr = "^" .. pathname + if filelist then + local expression + -- compare list entries with permitted pattern -- /xx /xx// + if not find(pathname,"/$") then + expression = pathname .. "/" + else + expression = pathname + end + expression = gsub(expression,"([%-%.])","%%%1") -- this also influences + expression = gsub(expression,"//+$", '/.*') -- later usage of pathname + expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless + expression = "^" .. expression .. "$" + if trace_detail then + logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname) + end for k=1,#filelist do local fl = filelist[k] local f = fl[2] - if find(f,expr) then - if trace_detail then - logs.report("fileio",'= found in hash: %s',f) - end + local d = dirlist[k] + if find(d,expression) then --- todo, test for readable result[#result+1] = fl[3] resolvers.register_in_trees(f) -- for tracing used files done = true - if not instance.allresults then break end + if instance.allresults then + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d) + end + else + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d) + end + break + end + elseif trace_detail then + logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d) end end end @@ -8078,7 +9922,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local fname = file.join(ppname,w) if resolvers.isreadable.file(fname) then if trace_detail then - logs.report("fileio",'= found by scanning: %s',fname) + logs.report("fileio","found '%s' by scanning",fname) end result[#result+1] = fname done = true @@ -8141,7 +9985,7 @@ function resolvers.find_given_files(filename) local hashes = instance.hashes for k=1,#hashes do local hash = hashes[k] - local files = instance.files[hash.tag] + local files = instance.files[hash.tag] or { } local blist = files[bname] if not blist then local rname = "remap:"..bname @@ -8251,9 +10095,9 @@ function resolvers.load(option) statistics.starttiming(instance) resolvers.resetconfig() resolvers.identify_cnf() - resolvers.load_lua() + resolvers.load_lua() -- will become the new method resolvers.expand_variables() - resolvers.load_cnf() + resolvers.load_cnf() -- will be skipped when we have a lua file resolvers.expand_variables() if option ~= "nofiles" then resolvers.load_hash() @@ -8265,22 +10109,23 @@ end function resolvers.for_files(command, files, filetype, mustexist) if files and #files > 0 then local function report(str) - if trace_verbose then + if trace_locating then logs.report("fileio",str) -- has already verbose else print(str) end end - if trace_verbose then - report('') + if trace_locating then + report('') -- ? 
end - for _, file in ipairs(files) do + for f=1,#files do + local file = files[f] local result = command(file,filetype,mustexist) if type(result) == 'string' then report(result) else - for _,v in ipairs(result) do - report(v) + for i=1,#result do + report(result[i]) -- could be unpack end end end @@ -8327,18 +10172,19 @@ end function table.sequenced(t,sep) -- temp here local s = { } - for k, v in pairs(t) do -- pairs? - s[#s+1] = k .. "=" .. v + for k, v in next, t do -- indexed? + s[#s+1] = k .. "=" .. tostring(v) end return concat(s, sep or " | ") end function resolvers.methodhandler(what, filename, filetype) -- ... + filename = file.collapse_path(filename) local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb local scheme = specification.scheme if resolvers[what][scheme] then if trace_locating then - logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification)) + logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolvers[what][scheme](filename,filetype) -- todo: specification else @@ -8358,8 +10204,9 @@ function resolvers.clean_path(str) end function resolvers.do_with_path(name,func) - for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs? - func("^"..resolvers.clean_path(v)) + local pathlist = resolvers.expanded_path_list(name) + for i=1,#pathlist do + func("^"..resolvers.clean_path(pathlist[i])) end end @@ -8368,7 +10215,9 @@ function resolvers.do_with_var(name,func) end function resolvers.with_files(pattern,handle) - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for i=1,#hashes do + local hash = hashes[i] local blobpath = hash.tag local blobtype = hash.type if blobpath then @@ -8383,7 +10232,7 @@ function resolvers.with_files(pattern,handle) if type(v) == "string" then handle(blobtype,blobpath,v,k) else - for _,vv in pairs(v) do -- ipairs? + for _,vv in next, v do -- indexed handle(blobtype,blobpath,vv,k) end end @@ -8395,7 +10244,7 @@ function resolvers.with_files(pattern,handle) end function resolvers.locate_format(name) - local barename, fmtname = name:gsub("%.%a+$",""), "" + local barename, fmtname = gsub(name,"%.%a+$",""), "" if resolvers.usecache then local path = file.join(caches.setpath("formats")) -- maybe platform fmtname = file.join(path,barename..".fmt") or "" @@ -8443,7 +10292,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8467,7 +10316,7 @@ luatools with a recache feature.

local format, lower, gsub = string.format, string.lower, string.gsub -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet caches = caches or { } @@ -8554,7 +10403,8 @@ function caches.setpath(...) caches.path = '.' end caches.path = resolvers.clean_path(caches.path) - if not table.is_empty({...}) then + local dirs = { ... } + if #dirs > 0 then local pth = dir.mkdirs(caches.path,...) return pth end @@ -8600,6 +10450,7 @@ function caches.savedata(filepath,filename,data,raw) if raw then reduce, simplify = false, false end + data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex else @@ -8625,7 +10476,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-res'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8660,6 +10511,14 @@ prefixes.relative = function(str,n) return resolvers.clean_path(str) end +prefixes.auto = function(str) + local fullname = prefixes.relative(str) + if not lfs.isfile(fullname) then + fullname = prefixes.locate(str) + end + return fullname +end + prefixes.locate = function(str) local fullname = resolvers.find_given_file(str) or "" return resolvers.clean_path((fullname ~= "" and fullname) or str) @@ -8683,6 +10542,16 @@ prefixes.full = prefixes.locate prefixes.file = prefixes.filename prefixes.path = prefixes.pathname +function resolvers.allprefixes(separator) + local all = table.sortedkeys(prefixes) + if separator then + for i=1,#all do + all[i] = all[i] .. 
":" + end + end + return all +end + local function _resolve_(method,target) if prefixes[method] then return prefixes[method](target) @@ -8693,7 +10562,8 @@ end local function resolve(str) if type(str) == "table" then - for k, v in pairs(str) do -- ipairs + for k=1,#str do + local v = str[k] str[k] = resolve(v) or v end elseif str and str ~= "" then @@ -8706,7 +10576,7 @@ resolvers.resolve = resolve if os.uname then - for k, v in pairs(os.uname()) do + for k, v in next, os.uname() do if not prefixes[k] then prefixes[k] = function() return v end end @@ -8721,7 +10591,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8742,7 +10612,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-out'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8758,7 +10628,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8769,8 +10639,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) --[[ldx--

Once we found ourselves defining similar cache constructs @@ -8834,7 +10702,7 @@ end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] - return storage and not table.is_empty(storage) and storage.cache_version == container.version + return storage and storage.cache_version == container.version else return false end @@ -8886,16 +10754,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, lower, gsub = string.format, string.lower, string.gsub +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -- since we want to use the cache instead of the tree, we will now -- reimplement the saver. @@ -8939,19 +10806,20 @@ resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) - if table.is_empty(mountpaths) and usecache then + if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = { caches.setpath("mount") } end - if not table.is_empty(mountpaths) then + if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) - for k, root in pairs(mountpaths) do + for k=1,#mountpaths do + local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then - if line:find("^[%%#%-]") then -- or %W + if find(line,"^[%%#%-]") then -- or %W -- skip - elseif line:find("^zip://") then + elseif find(line,"^zip://") then if trace_locating then logs.report("fileio","mounting %s",line) end @@ -8996,11 +10864,13 @@ function statistics.check_fmt_status(texname) local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") - if luv.enginebanner and luv.enginebanner ~= enginebanner then - return "engine mismatch" + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner) end - if luv.sourcehash and luv.sourcehash ~= sourcehash then - return "source mismatch" + local luvhash = luv.sourcehash or "?" 
+ if luvhash ~= sourcehash then + return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash) end else return "invalid status file" @@ -9019,18 +10889,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-zip'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, find = string.format, string.find +local format, find, match = string.format, string.find, string.match +local unpack = unpack or table.unpack -local trace_locating, trace_verbose = false, false +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trace_verbose = v end) +-- zip:///oeps.zip?name=bla/bla.tex +-- zip:///oeps.zip?tree=tex/texmf-local +-- zip:///texmf.zip?tree=/tex/texmf +-- zip:///texmf.zip?tree=/tex/texmf-local +-- zip:///texmf-mine.zip?tree=/tex/texmf-projects zip = zip or { } zip.archives = zip.archives or { } @@ -9041,9 +10915,6 @@ local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, local archives = zip.archives --- zip:///oeps.zip?name=bla/bla.tex --- zip:///oeps.zip?tree=tex/texmf-local - local function validzip(str) -- todo: use url splitter if not find(str,"^zip://") then return "zip:///" .. str @@ -9073,26 +10944,22 @@ function zip.closearchive(name) end end --- zip:///texmf.zip?tree=/tex/texmf --- zip:///texmf.zip?tree=/tex/texmf-local --- zip:///texmf-mine.zip?tree=/tex/texmf-projects - function locators.zip(specification) -- where is this used? startup zips (untested) specification = resolvers.splitmethod(specification) local zipfile = specification.path local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree if trace_locating then if zfile then - logs.report("fileio",'! zip locator, found: %s',specification.original) + logs.report("fileio","zip locator, archive '%s' found",specification.original) else - logs.report("fileio",'? zip locator, not found: %s',specification.original) + logs.report("fileio","zip locator, archive '%s' not found",specification.original) end end end function hashers.zip(tag,name) - if trace_verbose then - logs.report("fileio","loading zip file %s as %s",name,tag) + if trace_locating then + logs.report("fileio","loading zip file '%s' as '%s'",name,tag) end resolvers.usezipfile(format("%s?tree=%s",tag,name)) end @@ -9117,23 +10984,25 @@ function finders.zip(specification,filetype) local zfile = zip.openarchive(specification.path) if zfile then if trace_locating then - logs.report("fileio",'! zip finder, path: %s',specification.path) + logs.report("fileio","zip finder, archive '%s' found",specification.path) end local dfile = zfile:open(q.name) if dfile then dfile = zfile:close() if trace_locating then - logs.report("fileio",'+ zip finder, name: %s',q.name) + logs.report("fileio","zip finder, file '%s' found",q.name) end return specification.original + elseif trace_locating then + logs.report("fileio","zip finder, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'? 
zip finder, path %s',specification.path) + logs.report("fileio","zip finder, unknown archive '%s'",specification.path) end end end if trace_locating then - logs.report("fileio",'- zip finder, name: %s',filename) + logs.report("fileio","zip finder, '%s' not found",filename) end return unpack(finders.notfound) end @@ -9146,20 +11015,25 @@ function openers.zip(specification) local zfile = zip.openarchive(zipspecification.path) if zfile then if trace_locating then - logs.report("fileio",'+ zip starter, path: %s',zipspecification.path) + logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path) end local dfile = zfile:open(q.name) if dfile then logs.show_open(specification) + if trace_locating then + logs.report("fileio","zip opener, file '%s' found",q.name) + end return openers.text_opener(specification,dfile,'zip') + elseif trace_locating then + logs.report("fileio","zip opener, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'- zip starter, path %s',zipspecification.path) + logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path) end end end if trace_locating then - logs.report("fileio",'- zip opener, name: %s',filename) + logs.report("fileio","zip opener, '%s' not found",filename) end return unpack(openers.notfound) end @@ -9172,25 +11046,27 @@ function loaders.zip(specification) local zfile = zip.openarchive(specification.path) if zfile then if trace_locating then - logs.report("fileio",'+ zip starter, path: %s',specification.path) + logs.report("fileio","zip loader, archive '%s' opened",specification.path) end local dfile = zfile:open(q.name) if dfile then logs.show_load(filename) if trace_locating then - logs.report("fileio",'+ zip loader, name: %s',filename) + logs.report("fileio","zip loader, file '%s' loaded",filename) end local s = dfile:read("*all") dfile:close() return true, s, #s + elseif trace_locating then + logs.report("fileio","zip loader, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'- zip starter, path: %s',specification.path) + logs.report("fileio","zip loader, unknown archive '%s'",specification.path) end end end if trace_locating then - logs.report("fileio",'- zip loader, name: %s',filename) + logs.report("fileio","zip loader, '%s' not found",filename) end return unpack(openers.notfound) end @@ -9200,21 +11076,15 @@ end function resolvers.usezipfile(zipname) zipname = validzip(zipname) - if trace_locating then - logs.report("fileio",'! zip use, file: %s',zipname) - end local specification = resolvers.splitmethod(zipname) local zipfile = specification.path if zipfile and not zip.registeredfiles[zipname] then local tree = url.query(specification.query).tree or "" - if trace_locating then - logs.report("fileio",'! zip register, file: %s',zipname) - end local z = zip.openarchive(zipfile) if z then local instance = resolvers.instance if trace_locating then - logs.report("fileio","= zipfile, registering: %s",zipname) + logs.report("fileio","zip registering, registering archive '%s'",zipname) end statistics.starttiming(instance) resolvers.prepend_hash('zip',zipname,zipfile) @@ -9223,10 +11093,10 @@ function resolvers.usezipfile(zipname) instance.files[zipname] = resolvers.register_zip_file(z,tree or "") statistics.stoptiming(instance) elseif trace_locating then - logs.report("fileio","? zipfile, unknown: %s",zipname) + logs.report("fileio","zip registering, unknown archive '%s'",zipname) end elseif trace_locating then - logs.report("fileio",'! 
zip register, no file: %s',zipname) + logs.report("fileio","zip registering, '%s' not found",zipname) end end @@ -9238,11 +11108,11 @@ function resolvers.register_zip_file(z,tree) filter = format("^%s/(.+)/(.-)$",tree) end if trace_locating then - logs.report("fileio",'= zip filter: %s',filter) + logs.report("fileio","zip registering, using filter '%s'",filter) end local register, n = resolvers.register_file, 0 for i in z:files() do - local path, name = i.filename:match(filter) + local path, name = match(i.filename,filter) if path then if name and name ~= '' then register(files, name, path) @@ -9255,7 +11125,7 @@ function resolvers.register_zip_file(z,tree) n = n + 1 end end - logs.report("fileio",'= zip entries: %s',n) + logs.report("fileio","zip registering, %s files registered",n) return files end @@ -9266,12 +11136,14 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-crl'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local gsub = string.gsub + curl = curl or { } curl.cached = { } @@ -9280,9 +11152,9 @@ curl.cachepath = caches.definepath("curl") local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders function curl.fetch(protocol, name) - local cachename = curl.cachepath() .. "/" .. name:gsub("[^%a%d%.]+","-") --- cachename = cachename:gsub("[\\/]", io.fileseparator) - cachename = cachename:gsub("[\\]", "/") -- cleanup + local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-") +-- cachename = gsub(cachename,"[\\/]", io.fileseparator) + cachename = gsub(cachename,"[\\]", "/") -- cleanup if not curl.cached[name] then if not io.exists(cachename) then curl.cached[name] = cachename @@ -9324,6 +11196,164 @@ curl.install('https') curl.install('ftp') +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['data-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- some loading stuff ... we might move this one to slot 2 depending +-- on the developments (the loaders must not trigger kpse); we could +-- of course use a more extensive lib path spec + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local gsub, insert = string.gsub, table.insert +local unpack = unpack or table.unpack + +local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs' +local clibformats = { 'lib' } + +local _path_, libpaths, _cpath_, clibpaths + +function package.libpaths() + if not _path_ or package.path ~= _path_ then + _path_ = package.path + libpaths = file.split_path(_path_,";") + end + return libpaths +end + +function package.clibpaths() + if not _cpath_ or package.cpath ~= _cpath_ then + _cpath_ = package.cpath + clibpaths = file.split_path(_cpath_,";") + end + return clibpaths +end + +local function thepath(...) + local t = { ... } t[#t+1] = "?.lua" + local path = file.join(unpack(t)) + if trace_locating then + logs.report("fileio","! 
appending '%s' to 'package.path'",path) + end + return path +end + +local p_libpaths, a_libpaths = { }, { } + +function package.append_libpath(...) + insert(a_libpath,thepath(...)) +end + +function package.prepend_libpath(...) + insert(p_libpaths,1,thepath(...)) +end + +-- beware, we need to return a loadfile result ! + +local function loaded(libpaths,name,simple) + for i=1,#libpaths do -- package.path, might become option + local libpath = libpaths[i] + local resolved = gsub(libpath,"%?",simple) + if trace_locating then -- more detail + logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved) + end + return loadfile(resolved) + end + end +end + + +package.loaders[2] = function(name) -- was [#package.loaders+1] + if trace_locating then -- mode detail + logs.report("fileio","! locating '%s'",name) + end + for i=1,#libformats do + local format = libformats[i] + local resolved = resolvers.find_file(name,format) or "" + if trace_locating then -- mode detail + logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format) + end + if resolved ~= "" then + if trace_locating then + logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved) + end + return loadfile(resolved) + end + end + -- libpaths + local libpaths, clibpaths = package.libpaths(), package.clibpaths() + local simple = gsub(name,"%.lua$","") + local simple = gsub(simple,"%.","/") + local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple) + if resolved then + return resolved + end + -- + local libname = file.addsuffix(simple,os.libsuffix) + for i=1,#clibformats do + -- better have a dedicated loop + local format = clibformats[i] + local paths = resolvers.expanded_path_list_from_var(format) + for p=1,#paths do + local path = paths[p] + local resolved = file.join(path,libname) + if trace_locating then -- mode detail + logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved) + end + return package.loadlib(resolved,name) + end + end + end + for i=1,#clibpaths do -- package.path, might become option + local libpath = clibpaths[i] + local resolved = gsub(libpath,"?",simple) + if trace_locating then -- more detail + logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved) + end + return package.loadlib(resolved,name) + end + end + -- just in case the distribution is messed up + if trace_loading then -- more detail + logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name) + end + local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or "" + if resolved ~= "" then + if trace_locating then + logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved) + end + return loadfile(resolved) + end + if trace_locating then + logs.report("fileio",'? unable to locate lib: %s',name) + end +-- return "unable to locate " .. 
name +end + +resolvers.loadlualib = require + + end -- of closure do -- create closure to overcome 200 locals limit @@ -9437,7 +11467,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -9445,47 +11475,47 @@ if not modules then modules = { } end modules ['data-aux'] = { local find = string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") local oldscript = resolvers.clean_path(oldname) - if trace_verbose then + if trace_locating then logs.report("fileio","to be replaced old script %s", oldscript) end local newscripts = resolvers.find_files(newname) or { } if #newscripts == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","unable to locate new script") end else for i=1,#newscripts do local newscript = resolvers.clean_path(newscripts[i]) - if trace_verbose then + if trace_locating then logs.report("fileio","checking new script %s", newscript) end if oldscript == newscript then - if trace_verbose then + if trace_locating then logs.report("fileio","old and new script are the same") end elseif not find(newscript,scriptpath) then - if trace_verbose then + if trace_locating then logs.report("fileio","new script should come from %s",scriptpath) end elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_verbose then + if trace_locating then logs.report("fileio","invalid new script name") end else local newdata = io.loaddata(newscript) if newdata then - if trace_verbose then + if trace_locating then logs.report("fileio","old script content replaced by new content") end io.savedata(oldscript,newdata) break - elseif trace_verbose then + elseif trace_locating then logs.report("fileio","unable to load new script") end end @@ -9500,25 +11530,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local find, gsub, match = string.find, string.gsub, string.match +local getenv, setenv = os.getenv, os.setenv + -- loads *.tmf files in minimal tree roots (to be optimized and documented) function resolvers.check_environment(tree) logs.simpleline() - os.setenv('TMP', os.getenv('TMP') or os.getenv('TEMP') or os.getenv('TMPDIR') or os.getenv('HOME')) - os.setenv('TEXOS', os.getenv('TEXOS') or ("texmf-" .. os.currentplatform())) - os.setenv('TEXPATH', (tree or "tex"):gsub("\/+$",'')) - os.setenv('TEXMFOS', os.getenv('TEXPATH') .. "/" .. os.getenv('TEXOS')) + setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME')) + setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. 
os.platform)) + setenv('TEXPATH', gsub(tree or "tex","\/+$",'')) + setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS')) logs.simpleline() - logs.simple("preset : TEXPATH => %s", os.getenv('TEXPATH')) - logs.simple("preset : TEXOS => %s", os.getenv('TEXOS')) - logs.simple("preset : TEXMFOS => %s", os.getenv('TEXMFOS')) - logs.simple("preset : TMP => %s", os.getenv('TMP')) + logs.simple("preset : TEXPATH => %s", getenv('TEXPATH')) + logs.simple("preset : TEXOS => %s", getenv('TEXOS')) + logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS')) + logs.simple("preset : TMP => %s", getenv('TMP')) logs.simple('') end @@ -9526,27 +11559,27 @@ function resolvers.load_environment(name) -- todo: key=value as well as lua local f = io.open(name) if f then for line in f:lines() do - if line:find("^[%%%#]") then + if find(line,"^[%%%#]") then -- skip comment else - local key, how, value = line:match("^(.-)%s*([<=>%?]+)%s*(.*)%s*$") + local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$") if how then - value = value:gsub("%%(.-)%%", function(v) return os.getenv(v) or "" end) + value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end) if how == "=" or how == "<<" then - os.setenv(key,value) + setenv(key,value) elseif how == "?" or how == "??" then - os.setenv(key,os.getenv(key) or value) + setenv(key,getenv(key) or value) elseif how == "<" or how == "+=" then - if os.getenv(key) then - os.setenv(key,os.getenv(key) .. io.fileseparator .. value) + if getenv(key) then + setenv(key,getenv(key) .. io.fileseparator .. value) else - os.setenv(key,value) + setenv(key,value) end elseif how == ">" or how == "=+" then - if os.getenv(key) then - os.setenv(key,value .. io.pathseparator .. os.getenv(key)) + if getenv(key) then + setenv(key,value .. io.pathseparator .. 
getenv(key)) else - os.setenv(key,value) + setenv(key,value) end end end @@ -9585,6 +11618,9 @@ if not modules then modules = { } end modules ['luat-sta'] = { -- this code is used in the updater +local gmatch, match = string.gmatch, string.match +local type = type + states = states or { } states.data = states.data or { } states.hash = states.hash or { } @@ -9613,13 +11649,17 @@ function states.set_by_tag(tag,key,value,default,persistent) if d then if type(d) == "table" then local dkey, hkey = key, key - local pre, post = key:match("(.+)%.([^%.]+)$") + local pre, post = match(key,"(.+)%.([^%.]+)$") if pre and post then - for k in pre:gmatch("[^%.]+") do + for k in gmatch(pre,"[^%.]+") do local dk = d[k] if not dk then dk = { } d[k] = dk + elseif type(dk) == "string" then + -- invalid table, unable to upgrade structure + -- hope for the best or delete the state file + break end d = dk end @@ -9647,7 +11687,7 @@ function states.get_by_tag(tag,key,default) else local d = states.data[tag] if d then - for k in key:gmatch("[^%.]+") do + for k in gmatch(key,"[^%.]+") do local dk = d[k] if dk then d = dk @@ -9782,6 +11822,7 @@ own.libs = { -- todo: check which ones are really needed 'l-os.lua', 'l-file.lua', 'l-md5.lua', + 'l-url.lua', 'l-dir.lua', 'l-boolean.lua', 'l-math.lua', @@ -9790,11 +11831,13 @@ own.libs = { -- todo: check which ones are really needed 'l-utils.lua', 'l-aux.lua', -- 'l-xml.lua', + 'trac-tra.lua', 'lxml-tab.lua', - 'lxml-pth.lua', - 'lxml-ent.lua', + 'lxml-lpt.lua', +-- 'lxml-ent.lua', 'lxml-mis.lua', - 'trac-tra.lua', + 'lxml-aux.lua', + 'lxml-xml.lua', 'luat-env.lua', 'trac-inf.lua', 'trac-log.lua', @@ -9809,7 +11852,7 @@ own.libs = { -- todo: check which ones are really needed -- 'data-bin.lua', 'data-zip.lua', 'data-crl.lua', --- 'data-lua.lua', + 'data-lua.lua', 'data-kps.lua', -- so that we can replace kpsewhich 'data-aux.lua', -- updater 'data-tmf.lua', -- tree files @@ -9827,7 +11870,8 @@ end -- End of hack. -own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua' +own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua' + own.path = string.match(own.name,"^(.+)[\\/].-$") or "." own.list = { '.' } @@ -9865,18 +11909,25 @@ if not resolvers then os.exit() end -logs.setprogram('MTXrun',"TDS Runner Tool 1.22",environment.arguments["verbose"] or false) +logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false) local instance = resolvers.reset() +local trackspec = environment.argument("trackers") or environment.argument("track") + +if trackspec then + trackers.enable(trackspec) +end + runners = runners or { } -- global messages = messages or { } messages.help = [[ ---script run an mtx script (--noquotes) ---execute run a script or program (--noquotes) +--script run an mtx script (lua preferred method) (--noquotes), no script gives list +--execute run a script or program (texmfstart method) (--noquotes) --resolve resolve prefixed arguments --ctxlua run internally (using preloaded libs) +--internal run script using built in libraries (same as --ctxlua) +--locate locate given filename --autotree use texmf tree cf. 
env 'texmfstart_tree' or 'texmfstarttree' @@ -9893,16 +11944,20 @@ messages.help = [[ --unix create unix (linux) stubs --verbose give a bit more info +--trackers=list enable given trackers --engine=str target engine --progname=str format or backend --edit launch editor with found file --launch (--all) launch files like manuals, assumes os support ---intern run script using built in libraries +--timedrun run a script and time its run +--autogenerate regenerate databases if needed (handy when used to run context in an editor) + +--usekpse use kpse as fallback (when no mkiv and cache installed, often slower) +--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality) ---usekpse use kpse as fallback (when no mkiv and cache installed, often slower) ---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality) +--prefixes show supported prefixes ]] runners.applications = { @@ -9918,20 +11973,17 @@ runners.suffixes = { } runners.registered = { - texexec = { 'texexec.rb', true }, -- context mkii runner (only tool not to be luafied) + texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied) texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it) texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied - -- texwork = { \texwork.pl', false }, -- perltk based editing environment, only used at pragma - + -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma makempy = { 'makempy.pl', true }, mptopdf = { 'mptopdf.pl', true }, pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced) - -- examplex = { 'examplex.rb', false }, concheck = { 'concheck.rb', false }, - runtools = { 'runtools.rb', true }, textools = { 'textools.rb', true }, tmftools = { 'tmftools.rb', true }, @@ -9943,7 +11995,6 @@ runners.registered = { xmltools = { 'xmltools.rb', true }, -- luatools = { 'luatools.lua', true }, mtxtools = { 'mtxtools.rb', true }, - pdftrimwhite = { 'pdftrimwhite.pl', false } } @@ -9952,6 +12003,13 @@ runners.launchers = { unix = { } } +-- like runners.libpath("framework"): looks on script's subpath + +function runners.libpath(...) + package.prepend_libpath(file.dirname(environment.ownscript),...) + package.prepend_libpath(file.dirname(environment.ownname) ,...) 
+end + function runners.prepare() local checkname = environment.argument("ifchanged") if checkname and checkname ~= "" then @@ -9996,7 +12054,7 @@ function runners.prepare() return "run" end -function runners.execute_script(fullname,internal) +function runners.execute_script(fullname,internal,nosplit) local noquote = environment.argument("noquotes") if fullname and fullname ~= "" then local state = runners.prepare() @@ -10036,17 +12094,20 @@ function runners.execute_script(fullname,internal) end end if result and result ~= "" then - local before, after = environment.split_arguments(fullname) -- already done - environment.arguments_before, environment.arguments_after = before, after + if not no_split then + local before, after = environment.split_arguments(fullname) -- already done + environment.arguments_before, environment.arguments_after = before, after + end if internal then - arg = { } for _,v in pairs(after) do arg[#arg+1] = v end + arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end + environment.ownscript = result dofile(result) else local binary = runners.applications[file.extname(result)] if binary and binary ~= "" then result = binary .. " " .. result end - local command = result .. " " .. environment.reconstruct_commandline(after,noquote) + local command = result .. " " .. environment.reconstruct_commandline(environment.arguments_after,noquote) if logs.verbose then logs.simpleline() logs.simple("executing: %s",command) @@ -10054,8 +12115,24 @@ function runners.execute_script(fullname,internal) logs.simpleline() io.flush() end - local code = os.exec(command) -- maybe spawn - return code == 0 + -- no os.exec because otherwise we get the wrong return value + local code = os.execute(command) -- maybe spawn + if code == 0 then + return true + else + if binary then + binary = file.addsuffix(binary,os.binsuffix) + for p in string.gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do + if lfs.isfile(file.join(p,binary)) then + return false + end + end + logs.simpleline() + logs.simple("This script needs '%s' which seems not to be installed.",binary) + logs.simpleline() + end + return false + end end end end @@ -10088,7 +12165,7 @@ function runners.execute_program(fullname) return false end --- the --usekpse flag will fallback on kpse +-- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs) local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010' local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010' @@ -10143,7 +12220,7 @@ function runners.locate_file(filename) end function runners.locate_platform() - runners.report_location(os.currentplatform()) + runners.report_location(os.platform) end function runners.report_location(result) @@ -10176,7 +12253,8 @@ end function runners.save_script_session(filename, list) local t = { } - for _, key in ipairs(list) do + for i=1,#list do + local key = list[i] t[key] = environment.arguments[key] end io.savedata(filename,table.serialize(t,true)) @@ -10265,20 +12343,22 @@ function runners.find_mtx_script(filename) if fullname and fullname ~= "" then return fullname end + -- mtx- prefix checking + local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-" -- context namespace, mtx- - fullname = "mtx-" .. filename + fullname = mtxprefix .. 
filename fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname end -- context namespace, mtx-s - fullname = "mtx-" .. basename .. "s" .. "." .. suffix + fullname = mtxprefix .. basename .. "s" .. "." .. suffix fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname end -- context namespace, mtx- - fullname = "mtx-" .. basename:gsub("s$","") .. "." .. suffix + fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname @@ -10288,9 +12368,17 @@ function runners.find_mtx_script(filename) return fullname end -function runners.execute_ctx_script(filename,arguments) +function runners.execute_ctx_script(filename) + local arguments = environment.arguments_after local fullname = runners.find_mtx_script(filename) or "" - -- retyr after generate but only if --autogenerate + if file.extname(fullname) == "cld" then + -- handy in editors where we force --autopdf + logs.simple("running cld script: %s",filename) + table.insert(arguments,1,fullname) + table.insert(arguments,"--autopdf") + fullname = runners.find_mtx_script("context") or "" + end + -- retry after generate but only if --autogenerate if fullname == "" and environment.argument("autogenerate") then -- might become the default instance.renewcache = true logs.setverbose(true) @@ -10319,32 +12407,51 @@ function runners.execute_ctx_script(filename,arguments) if logs.verbose then logs.simple("using script: %s\n",fullname) end + environment.ownscript = fullname dofile(fullname) local savename = environment.arguments['save'] - if savename and runners.save_list and not table.is_empty(runners.save_list or { }) then - if type(savename) ~= "string" then savename = file.basename(fullname) end - savename = file.replacesuffix(savename,"cfg") - runners.save_script_session(savename, runners.save_list) + if savename then + local save_list = runners.save_list + if save_list and next(save_list) then + if type(savename) ~= "string" then savename = file.basename(fullname) end + savename = file.replacesuffix(savename,"cfg") + runners.save_script_session(savename,save_list) + end end return true end else - logs.setverbose(true) - if filename == "" then - logs.simple("unknown script, no name given") + -- logs.setverbose(true) + if filename == "" or filename == "help" then local context = resolvers.find_file("mtx-context.lua") + logs.setverbose(true) if context ~= "" then local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed local valid = { } - for _, scriptname in ipairs(result) do - scriptname = string.match(scriptname,".*mtx%-([^%-]-)%.lua") - if scriptname then - valid[#valid+1] = scriptname + table.sort(result) + for i=1,#result do + local scriptname = result[i] + local scriptbase = string.match(scriptname,".*mtx%-([^%-]-)%.lua") + if scriptbase then + local data = io.loaddata(scriptname) + local banner, version = string.match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)") + if banner then + valid[#valid+1] = { scriptbase, version, banner } + end end end if #valid > 0 then - logs.simple("known scripts: %s",table.concat(valid,", ")) + logs.reportbanner() + logs.reportline() + logs.simple("no script name given, known scripts:") + logs.simple() + for k=1,#valid do + local v = valid[k] + logs.simple("%-12s %4s %s",v[1],v[2],v[3]) + end end + else + logs.simple("no script name given") end 
else filename = file.addsuffix(filename,"lua") @@ -10358,6 +12465,12 @@ function runners.execute_ctx_script(filename,arguments) end end +function runners.prefixes() + logs.reportbanner() + logs.reportline() + logs.simple(table.concat(resolvers.allprefixes(true)," ")) +end + function runners.timedrun(filename) -- just for me if filename and filename ~= "" then runners.timed(function() os.execute(filename) end) @@ -10385,7 +12498,9 @@ instance.lsrmode = environment.argument("lsr") or false -- maybe the unset has to go to this level -if environment.argument("usekpse") or environment.argument("forcekpse") then +local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))] + +if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then os.setenv("engine","") os.setenv("progname","") @@ -10420,7 +12535,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") then return (kpse_initialized():show_path(name)) or "" end - elseif environment.argument("usekpse") then + elseif environment.argument("usekpse") or is_mkii_stub then resolvers.load() @@ -10449,7 +12564,6 @@ else end - if environment.argument("selfmerge") then -- embed used libraries utils.merger.selfmerge(own.name,own.libs,own.list) @@ -10462,9 +12576,14 @@ elseif environment.argument("selfupdate") then elseif environment.argument("ctxlua") or environment.argument("internal") then -- run a script by loading it (using libs) ok = runners.execute_script(filename,true) -elseif environment.argument("script") or environment.argument("s") then +elseif environment.argument("script") or environment.argument("scripts") then -- run a script by loading it (using libs), pass args - ok = runners.execute_ctx_script(filename,after) + if is_mkii_stub then + -- execute mkii script + ok = runners.execute_script(filename,false,true) + else + ok = runners.execute_ctx_script(filename) + end elseif environment.argument("execute") then -- execute script ok = runners.execute_script(filename) @@ -10491,6 +12610,8 @@ elseif environment.argument("locate") then elseif environment.argument("platform")then -- locate platform runners.locate_platform() +elseif environment.argument("prefixes") then + runners.prefixes() elseif environment.argument("timedrun") then -- locate platform runners.timedrun(filename) @@ -10499,8 +12620,14 @@ elseif environment.argument("help") or filename=='help' or filename == "" then -- execute script elseif filename:find("^bin:") then ok = runners.execute_program(filename) +elseif is_mkii_stub then + -- execute mkii script + ok = runners.execute_script(filename,false,true) else - ok = runners.execute_script(filename) + ok = runners.execute_ctx_script(filename) + if not ok then + ok = runners.execute_script(filename) + end end if os.platform == "unix" then diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxtools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/mtxtools.bat deleted file mode 100755 index 2e0fd35080d..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxtools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute mtxtools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/mtxworks.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/mtxworks.cmd deleted file mode 100644 index 322d9464dd5..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/mtxworks.cmd +++ /dev/null @@ -1 +0,0 @@ -mtxrun --script texworks --start diff --git 
a/Master/texmf-dist/scripts/context/stubs/mswin/pdftools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/pdftools.bat deleted file mode 100755 index 94c8cf3bd79..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/pdftools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute pdftools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.bat b/Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.bat deleted file mode 100755 index ebc3cb690a7..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/pstopdf.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute pstopdf.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/rlxtools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/rlxtools.bat deleted file mode 100755 index 5069c935f3c..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/rlxtools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute rlxtools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/runtools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/runtools.bat deleted file mode 100755 index 2f89413d436..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/runtools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute runtools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texexec.bat b/Master/texmf-dist/scripts/context/stubs/mswin/texexec.bat deleted file mode 100755 index 6e8b80aabce..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/texexec.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute texexec.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texexec.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/texexec.cmd deleted file mode 100644 index acbe41fd82b..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/texexec.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute texexec.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texexec.exe b/Master/texmf-dist/scripts/context/stubs/mswin/texexec.exe new file mode 100644 index 00000000000..2d45f27494d Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/texexec.exe differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texfont.bat b/Master/texmf-dist/scripts/context/stubs/mswin/texfont.bat deleted file mode 100755 index 498d4d85b51..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/texfont.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute texfont.pl %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.cmd b/Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.cmd deleted file mode 100644 index 47a10cc54cd..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.exe 
b/Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.exe new file mode 100644 index 00000000000..2d45f27494d Binary files /dev/null and b/Master/texmf-dist/scripts/context/stubs/mswin/texmfstart.exe differ diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/textools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/textools.bat deleted file mode 100755 index b4cdfa1b332..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/textools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute textools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/texutil.bat b/Master/texmf-dist/scripts/context/stubs/mswin/texutil.bat deleted file mode 100755 index 8b718640dcf..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/texutil.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute texutil.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/tmftools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/tmftools.bat deleted file mode 100755 index 1f5d8022280..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/tmftools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute tmftools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/mswin/xmltools.bat b/Master/texmf-dist/scripts/context/stubs/mswin/xmltools.bat deleted file mode 100755 index 41f4ec30465..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/mswin/xmltools.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -setlocal -set ownpath=%~dp0% -texlua "%ownpath%mtxrun.lua" --usekpse --execute xmltools.rb %* -endlocal diff --git a/Master/texmf-dist/scripts/context/stubs/source/mtxrun_dll.c b/Master/texmf-dist/scripts/context/stubs/source/mtxrun_dll.c new file mode 100644 index 00000000000..5b7cd31a07b --- /dev/null +++ b/Master/texmf-dist/scripts/context/stubs/source/mtxrun_dll.c @@ -0,0 +1,221 @@ +/************************************************************************ + + Copyright: + + Public Domain + Originally written in 2010 by Tomasz M. Trzeciak and Hans Hagen + + This program is derived from the 'runscript' program originally + written in 2009 by T.M. Trzeciak. It has been adapted for use in + ConTeXt MkIV. + + Comment: + + In ConTeXt MkIV we have two core scripts: luatools.lua and + mtxrun.lua where the second one is used to launch other scripts. + Normally a user will use a call like: + + mtxrun --script font --reload + + Here mtxrun is a lua script. In order to avoid the usage of a cmd + file on windows this runner will start texlua directly. If the + shared library luatex.dll is available, texlua will be started in + the same process, thus avoiding any additional overhead. Otherwise + it will be spawned in a new process. + + We also don't want to use other runners, like those that use kpse + to locate the script as this is exactly what mtxrun itself is doing + already. Therefore the runscript program is adapted to a more direct + approach suitable for mtxrun. 
+ + Compilation: + + with gcc (size optimized): + + gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c + gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun + + with tcc (extra small size): + + tcc -shared -o mtxrun.dll mtxrun_dll.c + tcc -o mtxrun.exe mtxrun_exe.c mtxrun.def + +************************************************************************/ + +#include +#include +#include + +//#define STATIC +#define IS_WHITESPACE(c) ((c == ' ') || (c == '\t')) +#define MAX_CMD 32768 +#define DIE(...) { \ + fprintf( stderr, "mtxrun: " ); \ + fprintf( stderr, __VA_ARGS__ ); \ + return 1; \ +} + +char texlua_name[] = "texlua"; // just a bare name, luatex strips the rest anyway +static char cmdline[MAX_CMD]; +static char dirpath[MAX_PATH]; +static char progname[MAX_PATH]; +static char scriptpath[MAX_PATH]; +static char luatexpath[MAX_PATH]; +HMODULE dllluatex = NULL; +typedef int ( *mainlikeproc )( int, char ** ); + +#ifdef STATIC +int main( int argc, char *argv[] ) +#else +__declspec(dllexport) int dllrunscript( int argc, char *argv[] ) +#endif +{ + char *s, *luatexfname, *argstr, **lua_argv; + int k, quoted, lua_argc; + int passprogname = 0; + + // directory of this module/executable + + HMODULE module_handle = GetModuleHandle( "mtxrun.dll" ); + // if ( module_handle == NULL ) exe path will be used, which is OK too + k = (int) GetModuleFileName( module_handle, dirpath, MAX_PATH ); + if ( !k || ( k == MAX_PATH ) ) + DIE( "unable to determine a valid module name\n" ); + s = strrchr(dirpath, '\\'); + if ( s == NULL ) DIE( "no directory part in module path: %s\n", dirpath ); + *(++s) = '\0'; //remove file name, leave trailing backslash + + // program name + + k = strlen(argv[0]); + while ( k && (argv[0][k-1] != '/') && (argv[0][k-1] != '\\') ) k--; + strcpy(progname, &argv[0][k]); + s = progname; + if ( s = strrchr(s, '.') ) *s = '\0'; // remove file extension part + + // script path + + strcpy( scriptpath, dirpath ); + k = strlen(progname); + if ( k < 6 ) k = 6; // in case the program name is shorter than "mtxrun" + if ( strlen(dirpath) + k + 4 >= MAX_PATH ) + DIE( "path too long: %s%s\n", dirpath, progname ); + if ( ( strcmpi(progname,"mtxrun") == 0 ) || ( strcmpi(progname,"luatools") == 0 ) ) { + strcat( scriptpath, progname ); + strcat( scriptpath, ".lua" ); + } else { + strcat( scriptpath, "mtxrun.lua" ); + if ( strcmpi(progname,"texmfstart") != 0 ) passprogname = 1; + } + if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES ) + DIE( "file not found: %s\n", scriptpath ); + + // find texlua.exe + + if ( !SearchPath( + getenv( "PATH" ), // path to search (optional) + "texlua.exe", // file name to search + NULL, // file extension to add (optional) + MAX_PATH, // output buffer size + luatexpath, // output buffer pointer + &luatexfname ) // pointer to a file part in the output buffer (optional) + ) DIE( "unable to locate texlua.exe on the search path" ); + + // link directly with luatex.dll if available in texlua's dir + + strcpy( luatexfname, "luatex.dll" ); + if ( dllluatex = LoadLibrary(luatexpath) ) + { + mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, "dllluatexmain" ); + if ( dllluatexmain == NULL ) + DIE( "unable to locate dllluatexmain procedure in luatex.dll" ); + + // set up argument list for texlua script + + lua_argv = (char **)malloc( (argc + 4) * sizeof(char *) ); + if ( lua_argv == NULL ) DIE( "out of memory\n" ); + lua_argv[lua_argc=0] = texlua_name; + lua_argv[++lua_argc] = scriptpath; // script to execute + if (passprogname) { + lua_argv[++lua_argc] = 
"--script"; + lua_argv[++lua_argc] = progname; + } + for ( k = 1; k < argc; k++ ) lua_argv[++lua_argc] = argv[k]; + lua_argv[++lua_argc] = NULL; + + // call texlua interpreter + // dllluatexmain never returns, but we pretend that it does + + k = dllluatexmain( lua_argc, lua_argv ); + if (lua_argv) free( lua_argv ); + return k; + } + + // we are still here, so no luatex.dll; spawn texlua.exe instead + + strcpy( luatexfname, "texlua.exe" ); + strcpy( cmdline, "\"" ); + strcat( cmdline, luatexpath ); + strcat( cmdline, "\" \"" ); + strcat( cmdline, scriptpath ); + strcat( cmdline, "\"" ); + if (passprogname) { + strcat( cmdline, " --script " ); + strcat( cmdline, progname ); + } + + argstr = GetCommandLine(); // get the command line of this process + if ( argstr == NULL ) DIE( "unable to retrieve the command line string\n" ); + + // skip over argv[0] in the argument string + // (it can contain embedded double quotes if launched from cmd.exe!) + + for ( quoted = 0; (*argstr) && ( !IS_WHITESPACE(*argstr) || quoted ); argstr++ ) + if (*argstr == '"') quoted = !quoted; + + // pass through all the arguments + + if ( strlen(cmdline) + strlen(argstr) >= MAX_CMD ) + DIE( "command line string too long:\n%s%s\n", cmdline, argstr ); + strcat( cmdline, argstr ); + + // create child process + + STARTUPINFO si; + PROCESS_INFORMATION pi; + ZeroMemory( &si, sizeof(si) ); + si.cb = sizeof(si); + si.dwFlags = STARTF_USESTDHANDLES;// | STARTF_USESHOWWINDOW; + //si.dwFlags = STARTF_USESHOWWINDOW; + //si.wShowWindow = SW_HIDE ; // can be used to hide console window (requires STARTF_USESHOWWINDOW flag) + si.hStdInput = GetStdHandle( STD_INPUT_HANDLE ); + si.hStdOutput = GetStdHandle( STD_OUTPUT_HANDLE ); + si.hStdError = GetStdHandle( STD_ERROR_HANDLE ); + ZeroMemory( &pi, sizeof(pi) ); + + if( !CreateProcess( + NULL, // module name (uses command line if NULL) + cmdline, // command line + NULL, // process security atrributes + NULL, // thread security atrributes + TRUE, // handle inheritance + 0, // creation flags, e.g. CREATE_NEW_CONSOLE, CREATE_NO_WINDOW, DETACHED_PROCESS + NULL, // pointer to environment block (uses parent if NULL) + NULL, // starting directory (uses parent if NULL) + &si, // STARTUPINFO structure + &pi ) // PROCESS_INFORMATION structure + ) DIE( "command execution failed: %s\n", cmdline ); + + DWORD ret = 0; + CloseHandle( pi.hThread ); // thread handle is not needed + if ( WaitForSingleObject( pi.hProcess, INFINITE ) == WAIT_OBJECT_0 ) { + if ( !GetExitCodeProcess( pi.hProcess, &ret) ) + DIE( "unable to retrieve process exit code: %s\n", cmdline ); + } else DIE( "failed to wait for process termination: %s\n", cmdline ); + CloseHandle( pi.hProcess ); + + // propagate exit code from the child process + + return ret; + +} diff --git a/Master/texmf-dist/scripts/context/stubs/source/mtxrun_exe.c b/Master/texmf-dist/scripts/context/stubs/source/mtxrun_exe.c new file mode 100644 index 00000000000..0c27c272e37 --- /dev/null +++ b/Master/texmf-dist/scripts/context/stubs/source/mtxrun_exe.c @@ -0,0 +1,8 @@ +// This is the .exe part of the mtxrun program, see mtxrun_dll.c +// for more details. 
+ +#include + +__declspec(dllimport) int dllrunscript( int argc, char *argv[] ); + +int main( int argc, char *argv[] ) { return dllrunscript( argc, argv ); } diff --git a/Master/texmf-dist/scripts/context/stubs/source/readme.txt b/Master/texmf-dist/scripts/context/stubs/source/readme.txt new file mode 100644 index 00000000000..354d85b0920 --- /dev/null +++ b/Master/texmf-dist/scripts/context/stubs/source/readme.txt @@ -0,0 +1,36 @@ +Copyright: + +The originally 'runscript' program was written by in 2009 by +T.M.Trzeciak and is public domain. This derived mtxrun program +is an adapted version by Hans Hagen. + +Comment: + +In ConTeXt MkIV we have two core scripts: luatools.lua and +mtxrun.lua where the second one is used to launch other scripts. +Normally a user will use a call like: + +mtxrun --script font --reload + +Here mtxrun is a lua script. In order to avoid the usage of a cmd +file on windows this runner will start texlua directly. In TeXlive +a runner is added for each cmd file but we don't want that overhead +(and extra files). By using an exe we can call these scripts in +batch files without the need for using call. + +We also don't want to use other runners, like those that use kpse +to locate the script as this is exactly what mtxrun itself is doing +already. Therefore the runscript program is adapted to a more direct +approach suitable for mtxrun. + +Compilation: + +with gcc (size optimized): + +gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c +gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun + +with tcc (ver. 0.9.24), extra small size + +tcc -shared -o runscript.dll runscript_dll.c +tcc -o runscript.exe runscript_exe.c runscript.def diff --git a/Master/texmf-dist/scripts/context/stubs/unix/ctxtools b/Master/texmf-dist/scripts/context/stubs/unix/ctxtools deleted file mode 100755 index 4658a345a8c..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/ctxtools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute ctxtools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/luatools b/Master/texmf-dist/scripts/context/stubs/unix/luatools index 433d1b8dc0a..1d87322c108 100644 --- a/Master/texmf-dist/scripts/context/stubs/unix/luatools +++ b/Master/texmf-dist/scripts/context/stubs/unix/luatools @@ -39,13 +39,16 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-string'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep +local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch = lpeg.match + +-- some functions may disappear as they are not used anywhere if not string.split then @@ -85,8 +88,16 @@ function string:unquote() return (gsub(self,"^([\"\'])(.*)%1$","%2")) end +--~ function string:unquote() +--~ if find(self,"^[\'\"]") then +--~ return sub(self,2,-2) +--~ else +--~ return self +--~ end +--~ end + function string:quote() -- we could use format("%q") - return '"' .. self:unquote() .. 
'"' + return format("%q",self) end function string:count(pattern) -- variant 3 @@ -106,12 +117,23 @@ function string:limit(n,sentinel) end end -function string:strip() - return (gsub(self,"^%s*(.-)%s*$", "%1")) +--~ function string:strip() -- the .- is quite efficient +--~ -- return match(self,"^%s*(.-)%s*$") or "" +--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list +--~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)') +--~ end + +do -- roberto's variant: + local space = lpeg.S(" \t\v\n") + local nospace = 1 - space + local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0) + function string.strip(str) + return lpegmatch(stripper,str) or "" + end end function string:is_empty() - return not find(find,"%S") + return not find(self,"%S") end function string:enhance(pattern,action) @@ -145,14 +167,14 @@ if not string.characters then local function nextchar(str, index) index = index + 1 - return (index <= #str) and index or nil, str:sub(index,index) + return (index <= #str) and index or nil, sub(str,index,index) end function string:characters() return nextchar, self, 0 end local function nextbyte(str, index) index = index + 1 - return (index <= #str) and index or nil, byte(str:sub(index,index)) + return (index <= #str) and index or nil, byte(sub(str,index,index)) end function string:bytes() return nextbyte, self, 0 @@ -165,7 +187,7 @@ end function string:rpadd(n,chr) local m = n-#self if m > 0 then - return self .. self.rep(chr or " ",m) + return self .. rep(chr or " ",m) else return self end @@ -174,7 +196,7 @@ end function string:lpadd(n,chr) local m = n-#self if m > 0 then - return self.rep(chr or " ",m) .. self + return rep(chr or " ",m) .. self else return self end @@ -222,6 +244,17 @@ function string:pattesc() return (gsub(self,".",patterns_escapes)) end +local simple_escapes = { + ["-"] = "%-", + ["."] = "%.", + ["?"] = ".", + ["*"] = ".*", +} + +function string:simpleesc() + return (gsub(self,".",simple_escapes)) +end + function string:tohash() local t = { } for s in gmatch(self,"([^, ]+)") do -- lpeg @@ -233,10 +266,10 @@ end local pattern = lpeg.Ct(lpeg.C(1)^0) function string:totable() - return pattern:match(self) + return lpegmatch(pattern,self) end ---~ for _, str in ipairs { +--~ local t = { --~ "1234567123456712345671234567", --~ "a\tb\tc", --~ "aa\tbb\tcc", @@ -244,7 +277,10 @@ end --~ "aaaa\tbbbb\tcccc", --~ "aaaaa\tbbbbb\tccccc", --~ "aaaaaa\tbbbbbb\tcccccc", ---~ } do print(string.tabtospace(str)) end +--~ } +--~ for k,v do +--~ print(string.tabtospace(t[k])) +--~ end function string.tabtospace(str,tab) -- we don't handle embedded newlines @@ -252,7 +288,7 @@ function string.tabtospace(str,tab) local s = find(str,"\t") if s then if not tab then tab = 7 end -- only when found - local d = tab-(s-1)%tab + local d = tab-(s-1) % tab if d > 0 then str = gsub(str,"\t",rep(" ",d),1) else @@ -271,6 +307,25 @@ function string:compactlong() -- strips newlines and leading spaces return self end +function string:striplong() -- strips newlines and leading spaces + self = gsub(self,"^%s*","") + self = gsub(self,"[\n\r]+ *","\n") + return self +end + +function string:topattern(lowercase,strict) + if lowercase then + self = lower(self) + end + self = gsub(self,".",simple_escapes) + if self == "" then + self = ".*" + elseif strict then + self = "^" .. self .. 
"$" + end + return self +end + end -- of closure @@ -278,58 +333,64 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-lpeg'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc - ---~ l-lpeg.lua : - ---~ lpeg.digit = lpeg.R('09')^1 ---~ lpeg.sign = lpeg.S('+-')^1 ---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1) ---~ lpeg.number = lpeg.float + lpeg.integer ---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1 ---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1 ---~ lpeg.uppercase = lpeg.P("AZ") ---~ lpeg.lowercase = lpeg.P("az") - ---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed ---~ lpeg.space = lpeg.S(' ')^1 ---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1 ---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1 ---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1 - -local hash = { } +local lpeg = require("lpeg") + +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V +local match = lpeg.match + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local utf8byte = R("\128\191") + +patterns.utf8byte = utf8byte +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8byte +patterns.utf8three = R("\224\239") * utf8byte * utf8byte +patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte + +patterns.digit = digit +patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.float = sign^0 * digit^0 * P('.') * digit^1 +patterns.number = patterns.float + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = S(" ") +patterns.eol = S("\n\r") +patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) +patterns.newline = crlf + cr + lf +patterns.nonspace = 1 - patterns.space +patterns.nonspacer = 1 - patterns.spacer +patterns.whitespace = patterns.eol + patterns.spacer +patterns.nonwhitespace = 1 - patterns.whitespace +patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * lpeg.V(1) } -end - -function lpeg.startswith(pattern) --slightly adapted - return P(pattern) + return P { P(pattern) + 1 * V(1) } -- why so complex? 
end function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end --- variant: - ---~ local parser = lpeg.Ct(lpeg.splitat(newline)) - -local crlf = P("\r\n") -local cr = P("\r") -local lf = P("\n") -local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -local newline = crlf + cr + lf -local spacing = space^0 * newline - +local spacing = patterns.spacer^0 * patterns.newline -- sort of strip local empty = spacing * Cc("") local nonempty = Cs((1-spacing)^1) * spacing^-1 local content = (empty + nonempty)^1 @@ -337,15 +398,15 @@ local content = (empty + nonempty)^1 local capture = Ct(content^0) function string:splitlines() - return capture:match(self) + return match(capture,self) end -lpeg.linebyline = content -- better make a sublibrary +patterns.textline = content ---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more ---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more ---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps ---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps local splitters_s, splitters_m = { }, { } @@ -355,7 +416,7 @@ local function splitat(separator,single) separator = P(separator) if single then local other, any = C((1 - separator)^0), P(1) - splitter = other * (separator * C(any^0) + "") + splitter = other * (separator * C(any^0) + "") -- ? splitters_s[separator] = splitter else local other = C((1 - separator)^0) @@ -370,15 +431,72 @@ lpeg.splitat = splitat local cache = { } +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = Ct(splitat(separator)) + cache[separator] = c + end + return match(c,str) +end + function string:split(separator) local c = cache[separator] if not c then c = Ct(splitat(separator)) cache[separator] = c end - return c:match(self) + return match(c,self) +end + +lpeg.splitters = cache + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,str) +end + +function string:checkedsplit(separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,self) end +--~ function lpeg.append(list,pp) +--~ local p = pp +--~ for l=1,#list do +--~ if p then +--~ p = p + P(list[l]) +--~ else +--~ p = P(list[l]) +--~ end +--~ end +--~ return p +--~ end + +--~ from roberto's site: + +local f1 = string.byte + +local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + end -- of closure @@ -386,7 +504,7 @@ do -- create closure to overcome 200 
locals limit if not modules then modules = { } end modules ['l-table'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -395,9 +513,10 @@ if not modules then modules = { } end modules ['l-table'] = { table.join = table.concat local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump +local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match local getmetatable, setmetatable = getmetatable, setmetatable -local type, next, tostring, ipairs = type, next, tostring, ipairs +local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs +local unpack = unpack or table.unpack function table.strip(tab) local lst = { } @@ -412,6 +531,14 @@ function table.strip(tab) return lst end +function table.keys(t) + local k = { } + for key, _ in next, t do + k[#k+1] = key + end + return k +end + local function compare(a,b) return (tostring(a) < tostring(b)) end @@ -455,7 +582,7 @@ end table.sortedkeys = sortedkeys table.sortedhashkeys = sortedhashkeys -function table.sortedpairs(t) +function table.sortedhash(t) local s = sortedhashkeys(t) -- maybe just sortedkeys local n = 0 local function kv(s) @@ -466,6 +593,8 @@ function table.sortedpairs(t) return kv, s end +table.sortedpairs = table.sortedhash + function table.append(t, list) for _,v in next, list do insert(t,v) @@ -588,18 +717,18 @@ end -- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) -function table.is_empty(t) +function table.is_empty(t) -- obolete, use inline code instead return not t or not next(t) end -function table.one_entry(t) +function table.one_entry(t) -- obolete, use inline code instead local n = next(t) return n and not next(t,n) end -function table.starts_at(t) - return ipairs(t,1)(t,0) -end +--~ function table.starts_at(t) -- obsolete, not nice +--~ return ipairs(t,1)(t,0) +--~ end function table.tohash(t,value) local h = { } @@ -677,6 +806,8 @@ end -- -- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) +-- problem: there no good number_to_string converter with the best resolution + local function do_serialize(root,name,depth,level,indexed) if level > 0 then depth = depth .. " " @@ -699,8 +830,9 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s{",depth)) end end + -- we could check for k (index) being number (cardinal) if root and next(root) then - local first, last = nil, 0 -- #root cannot be trusted here + local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) if compact then -- NOT: for k=1,#root do (we need to quit at nil) for k,v in ipairs(root) do -- can we use next? 
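
Editorial note: the l-table hunks above add table.keys and rename table.sortedpairs to table.sortedhash, keeping the old name as an alias. A brief usage sketch:

    -- usage sketch for the helpers added above
    local t = { banana = 2, apple = 1, cherry = 3 }

    local k = table.keys(t)             -- list of keys, in no particular order

    for key, value in table.sortedhash(t) do
        print(key, value)               -- apple 1, banana 2, cherry 3
    end
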
@@ -721,10 +853,10 @@ local function do_serialize(root,name,depth,level,indexed) if hexify then handle(format("%s 0x%04X,",depth,v)) else - handle(format("%s %s,",depth,v)) + handle(format("%s %s,",depth,v)) -- %.99g end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) @@ -761,29 +893,29 @@ local function do_serialize(root,name,depth,level,indexed) --~ if hexify then --~ handle(format("%s %s=0x%04X,",depth,key(k),v)) --~ else - --~ handle(format("%s %s=%s,",depth,key(k),v)) + --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g --~ end if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) else - handle(format("%s [%s]=%s,",depth,k,v)) + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g end elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then if hexify then handle(format("%s %s=0x%04X,",depth,k,v)) else - handle(format("%s %s=%s,",depth,k,v)) + handle(format("%s %s=%s,",depth,k,v)) -- %.99g end else if hexify then handle(format("%s [%q]=0x%04X,",depth,k,v)) else - handle(format("%s [%q]=%s,",depth,k,v)) + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then --~ handle(format("%s %s=%s,",depth,key(k),v)) if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then @@ -992,7 +1124,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify) end end -local function flatten(t,f,complete) +local function flatten(t,f,complete) -- is this used? meybe a variant with next, ... for i=1,#t do local v = t[i] if type(v) == "table" then @@ -1021,6 +1153,24 @@ end table.flatten_one_level = table.unnest +-- a better one: + +local function flattened(t,f) + if not f then + f = { } + end + for k, v in next, t do + if type(v) == "table" then + flattened(v,f) + else + f[k] = v + end + end + return f +end + +table.flattened = flattened + -- the next three may disappear function table.remove_value(t,value) -- todo: n @@ -1156,7 +1306,7 @@ function table.clone(t,p) -- t is optional or nil or table elseif not t then t = { } end - setmetatable(t, { __index = function(_,key) return p[key] end }) + setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ? 
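    -- editorial sketch: table.clone({ x = 1 }, { y = 2 }) returns the first table
    -- with the second one reachable through __index, so t.x == 1 is an own field
    -- while t.y == 2 is inherited via the metatable set above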
return t end @@ -1184,21 +1334,35 @@ function table.reverse(t) return tt end ---~ function table.keys(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return k ---~ end +function table.insert_before_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end ---~ function table.keys_as_string(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return concat(k,"") ---~ end +function table.insert_after_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end end -- of closure @@ -1207,13 +1371,13 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-io'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local byte = string.byte +local byte, find, gsub = string.byte, string.find, string.gsub if string.find(os.getenv("PATH"),";") then io.fileseparator, io.pathseparator = "\\", ";" @@ -1242,7 +1406,7 @@ function io.savedata(filename,data,joiner) elseif type(data) == "function" then data(f) else - f:write(data) + f:write(data or "") end f:close() return true @@ -1371,20 +1535,21 @@ function io.ask(question,default,options) end io.write(string.format(" ")) local answer = io.read() - answer = answer:gsub("^%s*(.*)%s*$","%1") + answer = gsub(answer,"^%s*(.*)%s*$","%1") if answer == "" and default then return default elseif not options then return answer else - for _,v in pairs(options) do - if v == answer then + for k=1,#options do + if options[k] == answer then return answer end end local pattern = "^" .. 
answer - for _,v in pairs(options) do - if v:find(pattern) then + for k=1,#options do + local v = options[k] + if find(v,pattern) then return v end end @@ -1399,20 +1564,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-number'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format = string.format +local tostring = tostring +local format, floor, insert, match = string.format, math.floor, table.insert, string.match +local lpegmatch = lpeg.match number = number or { } -- a,b,c,d,e,f = number.toset(100101) function number.toset(n) - return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") + return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") end function number.toevenhex(n) @@ -1438,10 +1605,21 @@ end local one = lpeg.C(1-lpeg.S(''))^1 function number.toset(n) - return one:match(tostring(n)) + return lpegmatch(one,tostring(n)) end - +function number.bits(n,zero) + local t, i = { }, (zero and 0) or 1 + while n > 0 do + local m = n % 2 + if m > 0 then + insert(t,1,i) + end + n = floor(n/2) + i = i + 1 + end + return t +end end -- of closure @@ -1450,7 +1628,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-set'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1540,46 +1718,63 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-os'] = { version = 1.001, - comment = "companion to luat-lub.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local find = string.find +-- maybe build io.flush in os.execute + +local find, format, gsub = string.find, string.format, string.gsub +local random, ceil = math.random, math.ceil + +local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end function os.resultof(command) - return io.popen(command,"r"):read("*all") + ioflush() -- else messed up logging + local handle = io.popen(command,"r") + if not handle then + -- print("unknown command '".. command .. "' in os.resultof") + return "" + else + return handle:read("*all") or "" + end end -if not os.exec then os.exec = os.execute end -if not os.spawn then os.spawn = os.execute end - ---~ os.type : windows | unix (new, we already guessed os.platform) ---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +--~ os.type : windows | unix (new, we already guessed os.platform) +--~ os.name : windows | msdos | linux | macosx | solaris | .. 
| generic (new) +--~ os.platform : extended os.name with architecture if not io.fileseparator then if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows" + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" else - io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix" + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" end end -os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix" +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix = 'dll', 'exe' +else + os.libsuffix, os.binsuffix = 'so', '' +end function os.launch(str) - if os.platform == "windows" then + if os.type == "windows" then os.execute("start " .. str) -- os.spawn ? else os.execute(str .. " &") -- os.spawn ? end end -if not os.setenv then - function os.setenv() return false end -end - if not os.times then -- utime = user time -- stime = system time @@ -1609,64 +1804,218 @@ end --~ print(os.date("%H:%M:%S",os.gettimeofday())) --~ print(os.date("%H:%M:%S",os.time())) -os.arch = os.arch or function() - local a = os.resultof("uname -m") or "linux" - os.arch = function() - return a +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } + +local resolvers = os.resolvers + +local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil +local osix = osmt.__index + +osmt.__index = function(t,k) + return (resolvers[k] or osix)(t,k) +end + +setmetatable(os,osmt) + +if not os.setenv then + + -- we still store them but they won't be seen in + -- child processes although we might pass them some day + -- using command concatination + + local env, getenv = { }, os.getenv + + function os.setenv(k,v) + env[k] = v + end + + function os.getenv(k) + return env[k] or getenv(k) end - return a + end -local platform +-- we can use HOSTTYPE on some platforms -function os.currentplatform(name,default) - if not platform then - local name = os.name or os.platform or name -- os.name is built in, os.platform is mine - if not name then - platform = default or "linux" - elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then - if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then - platform = "mswin-64" - else - platform = "mswin" - end +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" else - local architecture = os.arch() - if name == "linux" then - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - elseif name 
== "macosx" then - if find(architecture,"i386") then - platform = "osx-intel" - else - platform = "osx-ppc" - end - elseif name == "sunos" then - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - elseif name == "freebsd" then - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - else - platform = default or name - end + platform = "mswin" end - function os.currentplatform() - return platform + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-64" + else + platform = "kfreebsd-i386" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + 
os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. +-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(os.date("%H")-os.date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) + end + else + return 1 end - return platform end @@ -1676,7 +2025,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-file'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1687,14 +2036,17 @@ if not modules then modules = { } end modules ['l-file'] = { file = file or { } local concat = table.concat -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char +local lpegmatch = lpeg.match function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) end function file.addsuffix(filename, suffix) - if not find(filename,"%.[%a%d]+$") then + if not suffix or suffix == "" then + return filename + elseif not find(filename,"%.[%a%d]+$") then return filename .. "." .. suffix else return filename @@ -1717,20 +2069,39 @@ function file.nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$","")) end -function file.extname(name) - return match(name,"^.+%.([^/\\]-)$") or "" +function file.extname(name,default) + return match(name,"^.+%.([^/\\]-)$") or default or "" end file.suffix = file.extname ---~ print(file.join("x/","/y")) ---~ print(file.join("http://","/y")) ---~ print(file.join("http://a","/y")) ---~ print(file.join("http:///a","/y")) ---~ print(file.join("//nas-1","/y")) +--~ function file.join(...) +--~ local pth = concat({...},"/") +--~ pth = gsub(pth,"\\","/") +--~ local a, b = match(pth,"^(.*://)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ a, b = match(pth,"^(//)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ return (gsub(pth,"//+","/")) +--~ end + +local trick_1 = char(1) +local trick_2 = "^" .. trick_1 .. "/+" function file.join(...) - local pth = concat({...},"/") + local lst = { ... 
} + local a, b = lst[1], lst[2] + if a == "" then + lst[1] = trick_1 + elseif b and find(a,"^/+$") and find(b,"^/") then + lst[1] = "" + lst[2] = gsub(b,"^/+","") + end + local pth = concat(lst,"/") pth = gsub(pth,"\\","/") local a, b = match(pth,"^(.*://)(.*)$") if a and b then @@ -1740,17 +2111,28 @@ function file.join(...) if a and b then return a .. gsub(b,"//+","/") end + pth = gsub(pth,trick_2,"") return (gsub(pth,"//+","/")) end +--~ print(file.join("//","/y")) +--~ print(file.join("/","/y")) +--~ print(file.join("","/y")) +--~ print(file.join("/x/","/y")) +--~ print(file.join("x/","/y")) +--~ print(file.join("http://","/y")) +--~ print(file.join("http://a","/y")) +--~ print(file.join("http:///a","/y")) +--~ print(file.join("//nas-1","/y")) + function file.iswritable(name) local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,".")) - return a and a.permissions:sub(2,2) == "w" + return a and sub(a.permissions,2,2) == "w" end function file.isreadable(name) local a = lfs.attributes(name) - return a and a.permissions:sub(1,1) == "r" + return a and sub(a.permissions,1,1) == "r" end file.is_readable = file.isreadable @@ -1758,36 +2140,50 @@ file.is_writable = file.iswritable -- todo: lpeg -function file.split_path(str) - local t = { } - str = gsub(str,"\\", "/") - str = gsub(str,"(%a):([;/])", "%1\001%2") - for name in gmatch(str,"([^;:]+)") do - if name ~= "" then - t[#t+1] = gsub(name,"\001",":") - end - end - return t +--~ function file.split_path(str) +--~ local t = { } +--~ str = gsub(str,"\\", "/") +--~ str = gsub(str,"(%a):([;/])", "%1\001%2") +--~ for name in gmatch(str,"([^;:]+)") do +--~ if name ~= "" then +--~ t[#t+1] = gsub(name,"\001",":") +--~ end +--~ end +--~ return t +--~ end + +local checkedsplit = string.checkedsplit + +function file.split_path(str,separator) + str = gsub(str,"\\","/") + return checkedsplit(str,separator or io.pathseparator) end function file.join_path(tab) return concat(tab,io.pathseparator) -- can have trailing // end +-- we can hash them weakly + function file.collapse_path(str) - str = gsub(str,"/%./","/") - local n, m = 1, 1 - while n > 0 or m > 0 do - str, n = gsub(str,"[^/%.]+/%.%.$","") - str, m = gsub(str,"[^/%.]+/%.%./","") - end - str = gsub(str,"([^/])/$","%1") - str = gsub(str,"^%./","") - str = gsub(str,"/%.$","") + str = gsub(str,"\\","/") + if find(str,"/") then + str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified + str = gsub(str,"/%./","/") + local n, m = 1, 1 + while n > 0 or m > 0 do + str, n = gsub(str,"[^/%.]+/%.%.$","") + str, m = gsub(str,"[^/%.]+/%.%./","") + end + str = gsub(str,"([^/])/$","%1") + -- str = gsub(str,"^%./","") -- ./xx in qualified + str = gsub(str,"/%.$","") + end if str == "" then str = "." 
end return str end +--~ print(file.collapse_path("/a")) --~ print(file.collapse_path("a/./b/..")) --~ print(file.collapse_path("a/aa/../b/bb")) --~ print(file.collapse_path("a/../..")) @@ -1817,27 +2213,27 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1 --~ function file.extname(name) ---~ return pattern:match(name) or "" +--~ return lpegmatch(pattern,name) or "" --~ end --~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1) --~ function file.removesuffix(name) ---~ return pattern:match(name) +--~ return lpegmatch(pattern,name) --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1 --~ function file.basename(name) ---~ return pattern:match(name) or name +--~ return lpegmatch(pattern,name) or name --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1 --~ function file.dirname(name) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) +--~ return sub(name,1,p-2) --~ else --~ return "" --~ end @@ -1846,7 +2242,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.addsuffix(name, suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then --~ return name --~ else @@ -1857,9 +2253,9 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.replacesuffix(name,suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) .. "." .. suffix +--~ return sub(name,1,p-2) .. "." .. suffix --~ else --~ return name .. "." .. suffix --~ end @@ -1868,11 +2264,11 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1 --~ function file.nameonly(name) ---~ local a, b = pattern:match(name) +--~ local a, b = lpegmatch(pattern,name) --~ if b then ---~ return name:sub(a,b-2) +--~ return sub(name,a,b-2) --~ elseif a then ---~ return name:sub(a) +--~ return sub(name,a) --~ else --~ return name --~ end @@ -1906,11 +2302,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":") -- ./name ../name /name c: :// name/name function file.is_qualified_path(filename) - return qualified:match(filename) + return lpegmatch(qualified,filename) ~= nil end function file.is_rootbased_path(filename) - return rootbased:match(filename) + return lpegmatch(rootbased,filename) ~= nil end local slash = lpeg.S("\\/") @@ -1923,16 +2319,25 @@ local base = lpeg.C((1-suffix)^0) local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc("")) function file.splitname(str) -- returns drive, path, base, suffix - return pattern:match(str) + return lpegmatch(pattern,str) end --- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end -- -- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } -- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } -- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } -- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } +--~ -- todo: +--~ +--~ if os.type == "windows" then +--~ local currentdir = lfs.currentdir +--~ function lfs.currentdir() +--~ return 
(gsub(currentdir(),"\\","/")) +--~ end +--~ end + end -- of closure @@ -1997,7 +2402,7 @@ end function file.loadchecksum(name) if md5 then local data = io.loaddata(name .. ".md5") - return data and data:gsub("%s","") + return data and (gsub(data,"%s","")) end return nil end @@ -2018,14 +2423,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-url'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local char, gmatch = string.char, string.gmatch +local char, gmatch, gsub = string.char, string.gmatch, string.gsub local tonumber, type = tonumber, type +local lpegmatch = lpeg.match -- from the spec (on the web): -- @@ -2049,7 +2455,9 @@ local hexdigit = lpeg.R("09","AF","af") local plus = lpeg.P("+") local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar) -local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^0) * colon + lpeg.Cc("") +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) + +local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("") local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("") local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("") local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("") @@ -2057,25 +2465,51 @@ local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0 local parser = lpeg.Ct(scheme * authority * path * query * fragment) +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + function url.split(str) - return (type(str) == "string" and parser:match(str)) or str + return (type(str) == "string" and lpegmatch(parser,str)) or str end +-- todo: cache them + function url.hashed(str) local s = url.split(str) + local somescheme = s[1] ~= "" return { - scheme = (s[1] ~= "" and s[1]) or "file", + scheme = (somescheme and s[1]) or "file", authority = s[2], - path = s[3], - query = s[4], - fragment = s[5], - original = str + path = s[3], + query = s[4], + fragment = s[5], + original = str, + noscheme = not somescheme, } end +function url.hasscheme(str) + return url.split(str)[1] ~= "" +end + +function url.addscheme(str,scheme) + return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str) +end + +function url.construct(hash) + local fullurl = hash.sheme .. "://".. hash.authority .. hash.path + if hash.query then + fullurl = fullurl .. "?".. hash.query + end + if hash.fragment then + fullurl = fullurl .. "?".. 
hash.fragment + end + return fullurl +end + function url.filename(filename) local t = url.hashed(filename) - return (t.scheme == "file" and t.path:gsub("^/([a-zA-Z])([:|])/)","%1:")) or filename + return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end function url.query(str) @@ -2129,24 +2563,59 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-dir'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- dir.expand_name will be merged with cleanpath and collapsepath + local type = type -local find, gmatch = string.find, string.gmatch +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local lpegmatch = lpeg.match dir = dir or { } +-- handy + +function dir.current() + return (gsub(lfs.currentdir(),"\\","/")) +end + -- optimizing for no string.find (*) does not save time local attributes = lfs.attributes local walkdir = lfs.dir -local function glob_pattern(path,patt,recurse,action) +local function glob_pattern(path,patt,recurse,action) + local ok, scanner + if path == "/" then + ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner do + local full = path .. name + local mode = attributes(full,'mode') + if mode == 'file' then + if find(full,patt) then + action(full) + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + glob_pattern(full,patt,recurse,action) + end + end + end +end + +dir.glob_pattern = glob_pattern + +local function collect_pattern(path,patt,recurse,result) local ok, scanner + result = result or { } if path == "/" then ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe else @@ -2156,19 +2625,22 @@ local function glob_pattern(path,patt,recurse,action) if not find(path,"/$") then path = path .. '/' end for name in scanner do local full = path .. name - local mode = attributes(full,'mode') + local attr = attributes(full) + local mode = attr.mode if mode == 'file' then if find(full,patt) then - action(full) + result[name] = attr end elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - glob_pattern(full,patt,recurse,action) + attr.list = collect_pattern(full,patt,recurse) + result[name] = attr end end end + return result end -dir.glob_pattern = glob_pattern +dir.collect_pattern = collect_pattern local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V @@ -2189,29 +2661,48 @@ local filter = Cs ( ( )^0 ) local function glob(str,t) - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif lfs.isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. 
base) + glob_pattern(start,result,recurse,t) + end end - return t - elseif lfs.isfile(str) then - local t = t or { } - t[#t+1] = str - return t else - local split = pattern:match(str) - if split then + if type(str) == "table" then local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = filter:match(start .. base) - glob_pattern(start,result,recurse,action) + for s=1,#str do + glob(str[s],t) + end + return t + elseif lfs.isfile(str) then + local t = t or { } + t[#t+1] = str return t else - return { } + local split = lpegmatch(pattern,str) + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,action) + return t + else + return { } + end end end end @@ -2273,11 +2764,12 @@ end local make_indeed = true -- false -if string.find(os.getenv("PATH"),";") then +if string.find(os.getenv("PATH"),";") then -- os.type == "windows" function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2288,13 +2780,13 @@ if string.find(os.getenv("PATH"),";") then end local first, middle, last local drive = false - first, middle, last = str:match("^(//)(//*)(.*)$") + first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else - first, last = str:match("^(//)/*(.-)$") + first, last = match(str,"^(//)/*(.-)$") if first then - middle, last = str:match("([^/]+)/+(.-)$") + middle, last = match(str,"([^/]+)/+(.-)$") if middle then pth = "//" .. middle else @@ -2302,11 +2794,11 @@ if string.find(os.getenv("PATH"),";") then last = "" end else - first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$") + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") if first then pth, drive = first .. middle, true else - middle, last = str:match("^(/*)(.-)$") + middle, last = match(str,"^(/*)(.-)$") if not middle then last = str end @@ -2340,34 +2832,31 @@ if string.find(os.getenv("PATH"),";") then --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) - local first, nothing, last = str:match("^(//)(//*)(.*)$") + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") if first then - first = lfs.currentdir() .. "/" - first = first:gsub("\\","/") + first = dir.current() .. 
"/" end if not first then - first, last = str:match("^(//)/*(.*)$") + first, last = match(str,"^(//)/*(.*)$") end if not first then - first, last = str:match("^([a-zA-Z]:)(.*)$") + first, last = match(str,"^([a-zA-Z]:)(.*)$") if first and not find(last,"^/") then local d = lfs.currentdir() if lfs.chdir(first) then - first = lfs.currentdir() - first = first:gsub("\\","/") + first = dir.current() end lfs.chdir(d) end end if not first then - first, last = lfs.currentdir(), str - first = first:gsub("\\","/") + first, last = dir.current(), str end - last = last:gsub("//","/") - last = last:gsub("/%./","/") - last = last:gsub("^/*","") - first = first:gsub("/*$","") + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") if last == "" then return first else @@ -2378,8 +2867,9 @@ if string.find(os.getenv("PATH"),";") then else function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2388,7 +2878,7 @@ else end end end - str = str:gsub("/+","/") + str = gsub(str,"/+","/") if find(str,"^/") then pth = "/" for s in gmatch(str,"[^/]+") do @@ -2422,12 +2912,12 @@ else --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath if not find(str,"^/") then str = lfs.currentdir() .. "/" .. str end - str = str:gsub("//","/") - str = str:gsub("/%./","/") + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") return str end @@ -2442,7 +2932,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-boolean'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2503,19 +2993,40 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-unicode'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +if not unicode then + + unicode = { utf8 = { } } + + local floor, char = math.floor, string.char + + function unicode.utf8.utfchar(n) + if n < 0x80 then + return char(n) + elseif n < 0x800 then + return char(0xC0 + floor(n/0x40)) .. char(0x80 + (n % 0x40)) + elseif n < 0x10000 then + return char(0xE0 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + elseif n < 0x40000 then + return char(0xF0 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + else -- wrong: + -- return char(0xF1 + floor(n/0x1000000)) .. char(0x80 + floor(n/0x40000)) .. char(0x80 + floor(n/0x1000)) .. char(0x80 + (floor(n/0x40) % 0x40)) .. char(0x80 + (n % 0x40)) + return "?" 
+ end + end + +end + utf = utf or unicode.utf8 local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub local char, byte, find, bytepairs = string.char, string.byte, string.find, string.bytepairs -unicode = unicode or { } - -- 0 EF BB BF UTF-8 -- 1 FF FE UTF-16-little-endian -- 2 FE FF UTF-16-big-endian @@ -2530,14 +3041,20 @@ unicode.utfname = { [4] = 'utf-32-be' } -function unicode.utftype(f) -- \000 fails ! +-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated + +function unicode.utftype(f) local str = f:read(4) if not str then f:seek('set') return 0 - elseif find(str,"^%z%z\254\255") then + -- elseif find(str,"^%z%z\254\255") then -- depricated + -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged + elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH) return 4 - elseif find(str,"^\255\254%z%z") then + -- elseif find(str,"^\255\254%z%z") then -- depricated + -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged + elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH) return 3 elseif find(str,"^\254\255") then f:seek('set',2) @@ -2681,7 +3198,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-math'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2728,7 +3245,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-utils'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2736,6 +3253,10 @@ if not modules then modules = { } end modules ['l-utils'] = { -- hm, quite unreadable +local gsub = string.gsub +local concat = table.concat +local type, next = type, next + if not utils then utils = { } end if not utils.merger then utils.merger = { } end if not utils.lua then utils.lua = { } end @@ -2773,7 +3294,7 @@ function utils.merger._self_load_(name) end if data and utils.merger.strip_comment then -- saves some 20K - data = data:gsub("%-%-~[^\n\r]*[\r\n]", "") + data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "") end return data or "" end @@ -2791,7 +3312,7 @@ end function utils.merger._self_swap_(data,code) if data ~= "" then - return (data:gsub(utils.merger.pattern, function(s) + return (gsub(data,utils.merger.pattern, function(s) return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. 
"\n\n" end, 1)) else @@ -2801,8 +3322,8 @@ end --~ stripper: --~ ---~ data = string.gsub(data,"%-%-~[^\n]*\n","") ---~ data = string.gsub(data,"\n\n+","\n") +--~ data = gsub(data,"%-%-~[^\n]*\n","") +--~ data = gsub(data,"\n\n+","\n") function utils.merger._self_libs_(libs,list) local result, f, frozen = { }, nil, false @@ -2810,9 +3331,10 @@ function utils.merger._self_libs_(libs,list) if type(libs) == 'string' then libs = { libs } end if type(list) == 'string' then list = { list } end local foundpath = nil - for _, lib in ipairs(libs) do - for _, pth in ipairs(list) do - pth = string.gsub(pth,"\\","/") -- file.clean_path + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path utils.report("checking library path %s",pth) local name = pth .. "/" .. lib if lfs.isfile(name) then @@ -2824,7 +3346,8 @@ function utils.merger._self_libs_(libs,list) if foundpath then utils.report("using library path %s",foundpath) local right, wrong = { }, { } - for _, lib in ipairs(libs) do + for i=1,#libs do + local lib = libs[i] local fullname = foundpath .. "/" .. lib if lfs.isfile(fullname) then -- right[#right+1] = lib @@ -2838,15 +3361,15 @@ function utils.merger._self_libs_(libs,list) end end if #right > 0 then - utils.report("merged libraries: %s",table.concat(right," ")) + utils.report("merged libraries: %s",concat(right," ")) end if #wrong > 0 then - utils.report("skipped libraries: %s",table.concat(wrong," ")) + utils.report("skipped libraries: %s",concat(wrong," ")) end else utils.report("no valid library path found") end - return table.concat(result, "\n\n") + return concat(result, "\n\n") end function utils.merger.selfcreate(libs,list,target) @@ -2904,16 +3427,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. 
end + aux = aux or { } local concat, format, gmatch = table.concat, string.format, string.gmatch local tostring, type = tostring, type +local lpegmatch = lpeg.match + +local P, R, V = lpeg.P, lpeg.R, lpeg.V + +local escape, left, right = P("\\"), P('{'), P('}') + +lpeg.patterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} local space = lpeg.P(' ') local equal = lpeg.P("=") @@ -2921,7 +3456,7 @@ local comma = lpeg.P(",") local lbrace = lpeg.P("{") local rbrace = lpeg.P("}") local nobrace = 1 - (lbrace+rbrace) -local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace } +local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace } local spaces = space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) @@ -2959,13 +3494,13 @@ function aux.make_settings_to_hash_pattern(set,how) end end -function aux.settings_to_hash(str) +function aux.settings_to_hash(str,existing) if str and str ~= "" then - hash = { } + hash = existing or { } if moretolerant then - pattern_b_s:match(str) + lpegmatch(pattern_b_s,str) else - pattern_a_s:match(str) + lpegmatch(pattern_a_s,str) end return hash else @@ -2973,39 +3508,41 @@ function aux.settings_to_hash(str) end end -function aux.settings_to_hash_tolerant(str) +function aux.settings_to_hash_tolerant(str,existing) if str and str ~= "" then - hash = { } - pattern_b_s:match(str) + hash = existing or { } + lpegmatch(pattern_b_s,str) return hash else return { } end end -function aux.settings_to_hash_strict(str) +function aux.settings_to_hash_strict(str,existing) if str and str ~= "" then - hash = { } - pattern_c_s:match(str) + hash = existing or { } + lpegmatch(pattern_c_s,str) return next(hash) and hash else return nil end end -local seperator = comma * space^0 +local separator = comma * space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) -local pattern = lpeg.Ct(value*(seperator*value)^0) +local pattern = lpeg.Ct(value*(separator*value)^0) -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored aux.settings_to_array_pattern = pattern +-- we could use a weak table as cache + function aux.settings_to_array(str) if not str or str == "" then return { } else - return pattern:match(str) + return lpegmatch(pattern,str) end end @@ -3014,10 +3551,10 @@ local function set(t,v) end local value = lpeg.P(lpeg.Carg(1)*value) / set -local pattern = value*(seperator*value)^0 * lpeg.Carg(1) +local pattern = value*(separator*value)^0 * lpeg.Carg(1) function aux.add_settings_to_array(t,str) - return pattern:match(str, nil, t) + return lpegmatch(pattern,str,nil,t) end function aux.hash_to_string(h,separator,yes,no,strict,omit) @@ -3065,6 +3602,13 @@ function aux.settings_to_set(str,t) return t end +local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace +local pattern = lpeg.Ct((space + value)^0) + +function aux.arguments_to_table(str) + return lpegmatch(pattern,str) +end + -- temporary here function aux.getparameters(self,class,parentclass,settings) @@ -3073,36 +3617,31 @@ function aux.getparameters(self,class,parentclass,settings) sc = table.clone(self[parent]) self[class] = sc end - aux.add_settings_to_array(sc, settings) + aux.settings_to_hash(settings,sc) end -- temporary here -local digit = lpeg.R("09") -local period = lpeg.P(".") -local zero = lpeg.P("0") - ---~ local finish = lpeg.P(-1) ---~ local nodigit = (1-digit) + finish ---~ local case_1 = (period * zero^1 * #nodigit)/"" 
-- .000 ---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100 - +local digit = lpeg.R("09") +local period = lpeg.P(".") +local zero = lpeg.P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") - -local number = digit^1 * (case_1 + case_2) -local stripper = lpeg.Cs((number + 1)^0) +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = lpeg.Cs((number + 1)^0) --~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --~ collectgarbage("collect") --~ str = string.rep(sample,10000) --~ local ts = os.clock() ---~ stripper:match(str) ---~ print(#str, os.clock()-ts, stripper:match(sample)) +--~ lpegmatch(stripper,str) +--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +lpeg.patterns.strip_zeros = stripper function aux.strip_zeros(str) - return stripper:match(str) + return lpegmatch(stripper,str) end function aux.definetable(target) -- defines undefined tables @@ -3126,6 +3665,24 @@ function aux.accesstable(target) return t end +-- as we use this a lot ... + +--~ function aux.cachefunction(action,weak) +--~ local cache = { } +--~ if weak then +--~ setmetatable(cache, { __mode = "kv" } ) +--~ end +--~ local function reminder(str) +--~ local found = cache[str] +--~ if not found then +--~ found = action(str) +--~ cache[str] = found +--~ end +--~ return found +--~ end +--~ return reminder, cache +--~ end + end -- of closure @@ -3133,7 +3690,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-tra'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-tra.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3143,12 +3700,17 @@ if not modules then modules = { } end modules ['trac-tra'] = { -- bound to a variable, like node.new, node.copy etc (contrary to for instance -- node.has_attribute which is bound to a has_attribute local variable in mkiv) +local debug = require "debug" + +local getinfo = debug.getinfo +local type, next = type, next +local concat = table.concat +local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub + debugger = debugger or { } local counters = { } local names = { } -local getinfo = debug.getinfo -local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch -- one @@ -3187,10 +3749,10 @@ function debugger.showstats(printer,threshold) local total, grandtotal, functions = 0, 0, 0 printer("\n") -- ugly but ok -- table.sort(counters) - for func, count in pairs(counters) do + for func, count in next, counters do if count > threshold then local name = getname(func) - if not name:find("for generator") then + if not find(name,"for generator") then printer(format("%8i %s", count, name)) total = total + count end @@ -3222,7 +3784,7 @@ end --~ local total, grandtotal, functions = 0, 0, 0 --~ printer("\n") -- ugly but ok --~ -- table.sort(counters) ---~ for func, count in pairs(counters) do +--~ for func, count in next, counters do --~ if count > threshold then --~ printer(format("%8i %s", count, func)) --~ total = total + count @@ 
-3276,38 +3838,77 @@ end --~ print("") --~ debugger.showstats(print,3) -trackers = trackers or { } +setters = setters or { } +setters.data = setters.data or { } -local data, done = { }, { } +--~ local function set(t,what,value) +--~ local data, done = t.data, t.done +--~ if type(what) == "string" then +--~ what = aux.settings_to_array(what) -- inefficient but ok +--~ end +--~ for i=1,#what do +--~ local w = what[i] +--~ for d, f in next, data do +--~ if done[d] then +--~ -- prevent recursion due to wildcards +--~ elseif find(d,w) then +--~ done[d] = true +--~ for i=1,#f do +--~ f[i](value) +--~ end +--~ end +--~ end +--~ end +--~ end -local function set(what,value) +local function set(t,what,value) + local data, done = t.data, t.done if type(what) == "string" then - what = aux.settings_to_array(what) + what = aux.settings_to_hash(what) -- inefficient but ok end - for i=1,#what do - local w = what[i] + for w, v in next, what do + if v == "" then + v = value + else + v = toboolean(v) + end for d, f in next, data do if done[d] then -- prevent recursion due to wildcards elseif find(d,w) then done[d] = true for i=1,#f do - f[i](value) + f[i](v) end end end end end -local function reset() - for d, f in next, data do +local function reset(t) + for d, f in next, t.data do for i=1,#f do f[i](false) end end end -function trackers.register(what,...) +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data what = lower(what) local w = data[what] if not w then @@ -3319,32 +3920,32 @@ function trackers.register(what,...) if typ == "function" then w[#w+1] = fnc elseif typ == "string" then - w[#w+1] = function(value) set(fnc,value,nesting) end + w[#w+1] = function(value) set(t,fnc,value,nesting) end end end end -function trackers.enable(what) - done = { } - set(what,true) +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,string.simpleesc(tostring(what))) + t.enable, t.done = e, { } end -function trackers.disable(what) - done = { } - if not what or what == "" then - trackers.reset(what) - else - set(what,false) - end +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,string.simpleesc(tostring(what))) + t.disable, t.done = e, { } end -function trackers.reset(what) - done = { } - reset() +function setters.reset(t) + t.done = { } + reset(t) end -function trackers.list() -- pattern - local list = table.sortedkeys(data) +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) local user, system = { }, { } for l=1,#list do local what = list[l] @@ -3357,6 +3958,78 @@ function trackers.list() -- pattern return user, system end +function setters.show(t) + commands.writestatus("","") + local list = setters.list(t) + for k=1,#list do + commands.writestatus(t.name,list[k]) + end + commands.writestatus("","") +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module + +function setters.new(name) + local t + t = { + data = { }, + name = name, + enable = function(...) setters.enable (t,...) end, + disable = function(...) setters.disable (t,...) end, + register = function(...) setters.register(t,...) end, + list = function(...) setters.list (t,...) 
end, + show = function(...) setters.show (t,...) end, + } + setters.data[name] = t + return t +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +local e = directives.enable +local d = directives.disable + +function directives.enable(...) + commands.writestatus("directives","enabling: %s",concat({...}," ")) + e(...) +end + +function directives.disable(...) + commands.writestatus("directives","disabling: %s",concat({...}," ")) + d(...) +end + +local e = experiments.enable +local d = experiments.disable + +function experiments.enable(...) + commands.writestatus("experiments","enabling: %s",concat({...}," ")) + e(...) +end + +function experiments.disable(...) + commands.writestatus("experiments","disabling: %s",concat({...}," ")) + d(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + statistics.enable = not v +end) + + end -- of closure @@ -3364,7 +4037,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['luat-env'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3376,10 +4049,10 @@ if not modules then modules = { } end modules ['luat-env'] = { -- evolved before bytecode arrays were available and so a lot of -- code has disappeared already. -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local format = string.format +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquote, quote = string.unquote, string.quote -- precautions @@ -3413,13 +4086,14 @@ if not environment.jobname then environ function environment.initialize_arguments(arg) local arguments, files = { }, { } environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index, argument in pairs(arg) do + for index=1,#arg do + local argument = arg[index] if index > 0 then - local flag, value = argument:match("^%-+(.+)=(.-)$") + local flag, value = match(argument,"^%-+(.-)=(.-)$") if flag then - arguments[flag] = string.unquote(value or "") + arguments[flag] = unquote(value or "") else - flag = argument:match("^%-+(.+)") + flag = match(argument,"^%-+(.+)") if flag then arguments[flag] = true else @@ -3446,25 +4120,30 @@ function environment.argument(name,partial) return arguments[name] elseif partial then if not sortedflags then - sortedflags = { } - for _,v in pairs(table.sortedkeys(arguments)) do - sortedflags[#sortedflags+1] = "^" .. v + sortedflags = table.sortedkeys(arguments) + for k=1,#sortedflags do + sortedflags[k] = "^" .. 
sortedflags[k] end environment.sortedflags = sortedflags end -- example of potential clash: ^mode ^modefile - for _,v in ipairs(sortedflags) do - if name:find(v) then - return arguments[v:sub(2,#v)] + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] end end end return nil end +environment.argument("x",true) + function environment.split_arguments(separator) -- rather special, cut-off before separator local done, before, after = false, { }, { } - for _,v in ipairs(environment.original_arguments) do + local original_arguments = environment.original_arguments + for k=1,#original_arguments do + local v = original_arguments[k] if not done and v == separator then done = true elseif done then @@ -3481,16 +4160,17 @@ function environment.reconstruct_commandline(arg,noquote) if noquote and #arg == 1 then local a = arg[1] a = resolvers.resolve(a) - a = a:unquote() + a = unquote(a) return a - elseif next(arg) then + elseif #arg > 0 then local result = { } - for _,a in ipairs(arg) do -- ipairs 1 .. #n + for i=1,#arg do + local a = arg[i] a = resolvers.resolve(a) - a = a:unquote() - a = a:gsub('"','\\"') -- tricky - if a:find(" ") then - result[#result+1] = a:quote() + a = unquote(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quote(a) else result[#result+1] = a end @@ -3503,17 +4183,18 @@ end if arg then - -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later) + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) local newarg, instring = { }, false - for index, argument in ipairs(arg) do - if argument:find("^\"") then - newarg[#newarg+1] = argument:gsub("^\"","") - if not argument:find("\"$") then + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then instring = true end - elseif argument:find("\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","") + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") instring = false elseif instring then newarg[#newarg] = newarg[#newarg] .. " " .. 
argument @@ -3568,12 +4249,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in filename = file.replacesuffix(filename, "lua") local fullname = environment.luafile(filename) if fullname and fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading file %s", fullname) end return environment.loadedluacode(fullname) else - if trace_verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end return nil @@ -3593,7 +4274,7 @@ function environment.loadluafile(filename, version) -- when not overloaded by explicit suffix we look for a luc file first local fullname = (lucname and environment.luafile(lucname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check @@ -3611,7 +4292,7 @@ function environment.loadluafile(filename, version) if v == version then return true else - if trace_verbose then + if trace_locating then logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version) end environment.loadluafile(filename) @@ -3622,12 +4303,12 @@ function environment.loadluafile(filename, version) end fullname = (luaname and environment.luafile(luaname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check if not chunk then - if verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end else @@ -3645,7 +4326,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-inf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-inf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3670,6 +4351,14 @@ function statistics.hastimer(instance) return instance and instance.starttime end +function statistics.resettiming(instance) + if not instance then + notimer = { timing = 0, loadtime = 0 } + else + instance.timing, instance.loadtime = 0, 0 + end +end + function statistics.starttiming(instance) if not instance then notimer = { } @@ -3684,6 +4373,8 @@ function statistics.starttiming(instance) if not instance.loadtime then instance.loadtime = 0 end + else +--~ logs.report("system","nested timing (%s)",tostring(instance)) end instance.timing = it + 1 end @@ -3729,6 +4420,12 @@ function statistics.elapsedindeed(instance) return t > statistics.threshold end +function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds + if statistics.elapsedindeed(instance) then + return format("%s seconds %s", statistics.elapsedtime(instance),rest or "") + end +end + -- general function function statistics.register(tag,fnc) @@ -3807,14 +4504,32 @@ function statistics.timed(action,report) report("total runtime: %s",statistics.elapsedtime(timer)) end +-- where, not really the best spot for this: + +commands = commands or { } + +local timer + +function commands.resettimer() + statistics.resettiming(timer) + statistics.starttiming(timer) +end + +function commands.elapsedtime() + statistics.stoptiming(timer) + tex.sprint(statistics.elapsedtime(timer)) +end + +commands.resettimer() + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } 
end modules ['luat-log'] = { +if not modules then modules = { } end modules ['trac-log'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-log.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -3822,7 +4537,11 @@ if not modules then modules = { } end modules ['luat-log'] = { -- this is old code that needs an overhaul -local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format +--~ io.stdout:setvbuf("no") +--~ io.stderr:setvbuf("no") + +local write_nl, write = texio.write_nl or print, texio.write or io.write +local format, gmatch = string.format, string.gmatch local texcount = tex and tex.count if texlua then @@ -3903,25 +4622,48 @@ function logs.tex.line(fmt,...) -- new end end +--~ function logs.tex.start_page_number() +--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +--~ if real > 0 then +--~ if user > 0 then +--~ if sub > 0 then +--~ write(format("[%s.%s.%s",real,user,sub)) +--~ else +--~ write(format("[%s.%s",real,user)) +--~ end +--~ else +--~ write(format("[%s",real)) +--~ end +--~ else +--~ write("[-") +--~ end +--~ end + +--~ function logs.tex.stop_page_number() +--~ write("]") +--~ end + +local real, user, sub + function logs.tex.start_page_number() - local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +end + +function logs.tex.stop_page_number() if real > 0 then if user > 0 then if sub > 0 then - write(format("[%s.%s.%s",real,user,sub)) + logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub) else - write(format("[%s.%s",real,user)) + logs.report("pages", "flushing realpage %s, userpage %s",real,user) end else - write(format("[%s",real)) + logs.report("pages", "flushing realpage %s",real) end else - write("[-") + logs.report("pages", "flushing page") end -end - -function logs.tex.stop_page_number() - write("]") + io.flush() end logs.tex.report_job_stat = statistics.show_job_stat @@ -4021,7 +4763,7 @@ end function logs.setprogram(_name_,_banner_,_verbose_) name, banner = _name_, _banner_ if _verbose_ then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") end logs.set_method("tex") logs.report = report -- also used in libraries @@ -4034,9 +4776,9 @@ end function logs.setverbose(what) if what then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") else - trackers.disable("resolvers.verbose") + trackers.disable("resolvers.locating") end logs.verbose = what or false end @@ -4053,7 +4795,7 @@ logs.report = logs.tex.report logs.simple = logs.tex.report function logs.reportlines(str) -- todo: - for line in str:gmatch("(.-)[\n\r]") do + for line in gmatch(str,"(.-)[\n\r]") do logs.report(line) end end @@ -4064,8 +4806,12 @@ end logs.simpleline = logs.reportline -function logs.help(message,option) +function logs.reportbanner() -- for scripts too logs.report(banner) +end + +function logs.help(message,option) + logs.reportbanner() logs.reportline() logs.reportlines(message) local moreinfo = logs.moreinfo or "" @@ -4097,6 +4843,11 @@ end --~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --~ end +function logs.fatal(where,...) 
+ logs.report(where,"fatal error: %s, aborting now",format(...)) + os.exit() +end + end -- of closure @@ -4104,10 +4855,10 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", - comment = "companion to luat-lib.tex", } -- After a few years using the code the large luat-inp.lua file @@ -4119,7 +4870,7 @@ if not modules then modules = { } end modules ['data-inp'] = { -- * some public auxiliary functions were made private -- -- TODO: os.getenv -> os.env[] --- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller) +-- TODO: instances.[hashes,cnffiles,configurations,522] -- TODO: check escaping in find etc, too much, too slow -- This lib is multi-purpose and can be loaded again later on so that @@ -4140,12 +4891,13 @@ if not modules then modules = { } end modules ['data-inp'] = { local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type = next, type +local lpegmatch = lpeg.match -local trace_locating, trace_detail, trace_verbose = false, false, false +local trace_locating, trace_detail, trace_expansions = false, false, false -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) -trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end) +trackers.register("resolvers.locating", function(v) trace_locating = v end) +trackers.register("resolvers.details", function(v) trace_detail = v end) +trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo if not resolvers then resolvers = { @@ -4169,7 +4921,7 @@ resolvers.generators.notfound = { nil } resolvers.cacheversion = '1.0.1' resolvers.cnfname = 'texmf.cnf' resolvers.luaname = 'texmfcnf.lua' -resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' +resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}' local dummy_path_expr = "^!*unset/*$" @@ -4211,8 +4963,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' } alternatives['map files'] = 'map' alternatives['enc files'] = 'enc' -alternatives['cid files'] = 'cid' -alternatives['fea files'] = 'fea' +alternatives['cid maps'] = 'cid' -- great, why no cid files +alternatives['font feature files'] = 'fea' -- and fea files here alternatives['opentype fonts'] = 'otf' alternatives['truetype fonts'] = 'ttf' alternatives['truetype collections'] = 'ttc' @@ -4228,6 +4980,11 @@ formats ['sfd'] = 'SFDFONTS' suffixes ['sfd'] = { 'sfd' } alternatives['subfont definition files'] = 'sfd' +-- lib paths + +formats ['lib'] = 'CLUAINPUTS' -- new (needs checking) +suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' } + -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. 
We used to pass the @@ -4350,8 +5107,10 @@ local function check_configuration() -- not yet ok, no time for debugging now -- bad luck end fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm - fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") - fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- this will go away some day + fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//") end @@ -4366,7 +5125,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' end end -resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE")) +resolvers.settrace(os.getenv("MTX_INPUT_TRACE")) function resolvers.osenv(key) local ie = instance.environment @@ -4454,37 +5213,43 @@ end -- work that well; the parsing is ok, but dealing with the resulting -- table is a pain because we need to work inside-out recursively +local function do_first(a,b) + local t = { } + for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end + return "{" .. concat(t,",") .. "}" +end + +local function do_second(a,b) + local t = { } + for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end + return "{" .. concat(t,",") .. "}" +end + +local function do_both(a,b) + local t = { } + for sa in gmatch(a,"[^,]+") do + for sb in gmatch(b,"[^,]+") do + t[#t+1] = sa .. sb + end + end + return "{" .. concat(t,",") .. "}" +end + +local function do_three(a,b,c) + return a .. b.. c +end + local function splitpathexpr(str, t, validate) -- no need for further optimization as it is only called a - -- few times, we can use lpeg for the sub; we could move - -- the local functions outside the body + -- few times, we can use lpeg for the sub + if trace_expansions then + logs.report("fileio","expanding variable '%s'",str) + end t = t or { } str = gsub(str,",}",",@}") str = gsub(str,"{,","{@,") -- str = "@" .. str .. "@" local ok, done - local function do_first(a,b) - local t = { } - for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end - return "{" .. concat(t,",") .. "}" - end - local function do_second(a,b) - local t = { } - for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end - return "{" .. concat(t,",") .. "}" - end - local function do_both(a,b) - local t = { } - for sa in gmatch(a,"[^,]+") do - for sb in gmatch(b,"[^,]+") do - t[#t+1] = sa .. sb - end - end - return "{" .. concat(t,",") .. "}" - end - local function do_three(a,b,c) - return a .. b.. c - end while true do done = false while true do @@ -4515,6 +5280,11 @@ local function splitpathexpr(str, t, validate) t[#t+1] = s end end + if trace_expansions then + for k=1,#t do + logs.report("fileio","% 4i: %s",k,t[k]) + end + end return t end @@ -4554,18 +5324,27 @@ end -- also we now follow the stupid route: if not set then just assume *one* -- cnf file under texmf (i.e. 
distribution) -resolvers.ownpath = resolvers.ownpath or nil -resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex" -resolvers.autoselfdir = true -- false may be handy for debugging +local args = environment and environment.original_arguments or arg -- this needs a cleanup + +resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" +resolvers.ownbin = gsub(resolvers.ownbin,"\\","/") function resolvers.getownpath() - if not resolvers.ownpath then - if resolvers.autoselfdir and os.selfdir then - resolvers.ownpath = os.selfdir - else - local binary = resolvers.ownbin - if os.platform == "windows" then - binary = file.replacesuffix(binary,"exe") + local ownpath = resolvers.ownpath or os.selfdir + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + end + local binary = resolvers.ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and file.dirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) end for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do local b = file.join(p,binary) @@ -4577,30 +5356,39 @@ function resolvers.getownpath() local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() - if trace_verbose and p ~= pp then - logs.report("fileio","following symlink %s to %s",p,pp) + if trace_locating and p ~= pp then + logs.report("fileio","following symlink '%s' to '%s'",p,pp) end - resolvers.ownpath = pp + ownpath = pp lfs.chdir(olddir) else - if trace_verbose then - logs.report("fileio","unable to check path %s",p) + if trace_locating then + logs.report("fileio","unable to check path '%s'",p) end - resolvers.ownpath = p + ownpath = p end break end end end - if not resolvers.ownpath then resolvers.ownpath = '.' end + if not ownpath or ownpath == "" then + ownpath = "." 
+ logs.report("fileio","forcing fallback ownpath .") + elseif trace_locating then + logs.report("fileio","using ownpath '%s'",ownpath) + end + end + resolvers.ownpath = ownpath + function resolvers.getownpath() + return resolvers.ownpath end - return resolvers.ownpath + return ownpath end local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" } local function identify_own() - local ownpath = resolvers.getownpath() or lfs.currentdir() + local ownpath = resolvers.getownpath() or dir.current() local ie = instance.environment if ownpath then if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end @@ -4613,10 +5401,10 @@ local function identify_own() if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end - if trace_verbose then + if trace_locating then for i=1,#own_places do local v = own_places[i] - logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown") + logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown") end end identify_own = function() end @@ -4648,10 +5436,8 @@ end local function load_cnf_file(fname) fname = resolvers.clean_path(fname) local lname = file.replacesuffix(fname,'lua') - local f = io.open(lname) - if f then -- this will go - f:close() - local dname = file.dirname(fname) + if lfs.isfile(lname) then + local dname = file.dirname(fname) -- fname ? if not instance.configuration[dname] then resolvers.load_data(dname,'configuration',lname and file.basename(lname)) instance.order[#instance.order+1] = instance.configuration[dname] @@ -4659,8 +5445,8 @@ local function load_cnf_file(fname) else f = io.open(fname) if f then - if trace_verbose then - logs.report("fileio","loading %s", fname) + if trace_locating then + logs.report("fileio","loading configuration file %s", fname) end local line, data, n, k, v local dname = file.dirname(fname) @@ -4694,14 +5480,16 @@ local function load_cnf_file(fname) end end f:close() - elseif trace_verbose then - logs.report("fileio","skipping %s", fname) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'", fname) end end end local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared) - for _,c in ipairs(instance.order) do + local order = instance.order + for i=1,#order do + local c = order[i] for k,v in next, c do if not instance.variables[k] then if instance.environment[k] then @@ -4717,19 +5505,24 @@ end function resolvers.load_cnf() local function loadoldconfigdata() - for _, fname in ipairs(instance.cnffiles) do - load_cnf_file(fname) + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + load_cnf_file(cnffiles[i]) end end -- instance.cnffiles contain complete names now ! 
+ -- we still use a funny mix of cnf and new but soon + -- we will switch to lua exclusively as we only use + -- the file to collect the tree roots if #instance.cnffiles == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)") end else - instance.rootpath = instance.cnffiles[1] - for k,fname in ipairs(instance.cnffiles) do - instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local cnffiles = instance.cnffiles + instance.rootpath = cnffiles[1] + for k=1,#cnffiles do + instance.cnffiles[k] = file.collapse_path(cnffiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -4757,8 +5550,9 @@ function resolvers.load_lua() -- yet harmless else instance.rootpath = instance.luafiles[1] - for k,fname in ipairs(instance.luafiles) do - instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local luafiles = instance.luafiles + for k=1,#luafiles do + instance.luafiles[k] = file.collapse_path(luafiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -4790,14 +5584,14 @@ end function resolvers.append_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash append: %s",tag) + logs.report("fileio","hash '%s' appended",tag) end insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } ) end function resolvers.prepend_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash prepend: %s",tag) + logs.report("fileio","hash '%s' prepended",tag) end insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } ) end @@ -4821,9 +5615,11 @@ end -- locators function resolvers.locatelists() - for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do - if trace_verbose then - logs.report("fileio","locating list of %s",path) + local texmfpaths = resolvers.clean_path_list('TEXMF') + for i=1,#texmfpaths do + local path = texmfpaths[i] + if trace_locating then + logs.report("fileio","locating list of '%s'",path) end resolvers.locatedatabase(file.collapse_path(path)) end @@ -4836,11 +5632,11 @@ end function resolvers.locators.tex(specification) if specification and specification ~= '' and lfs.isdir(specification) then if trace_locating then - logs.report("fileio",'! tex locator found: %s',specification) + logs.report("fileio","tex locator '%s' found",specification) end resolvers.append_hash('file',specification,filename) elseif trace_locating then - logs.report("fileio",'? 
tex locator not found: %s',specification) + logs.report("fileio","tex locator '%s' not found",specification) end end @@ -4854,7 +5650,9 @@ function resolvers.loadfiles() instance.loaderror = false instance.files = { } if not instance.renewcache then - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for k=1,#hashes do + local hash = hashes[k] resolvers.hashdatabase(hash.tag,hash.name) if instance.loaderror then break end end @@ -4868,8 +5666,9 @@ end -- generators: function resolvers.loadlists() - for _, hash in ipairs(instance.hashes) do - resolvers.generatedatabase(hash.tag) + local hashes = instance.hashes + for i=1,#hashes do + resolvers.generatedatabase(hashes[i].tag) end end @@ -4881,10 +5680,27 @@ end local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = lpeg.P(" ") +--~ local l_character = lpeg.patterns.utf8 +--~ local l_dangerous = lpeg.P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1) +--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false) + +--~ local function test(str) +--~ print(str,lpeg.match(l_normal,str)) +--~ end +--~ test("ヒラギノ明朝 Pro W3") +--~ test("..ヒラギノ明朝 Pro W3") +--~ test(":ヒラギノ明朝 Pro W3;") +--~ test("ヒラギノ明朝 /Pro W3;") +--~ test("ヒラギノ明朝 Pro W3") + function resolvers.generators.tex(specification) local tag = specification - if trace_verbose then - logs.report("fileio","scanning path %s",specification) + if trace_locating then + logs.report("fileio","scanning path '%s'",specification) end instance.files[tag] = { } local files = instance.files[tag] @@ -4900,7 +5716,8 @@ function resolvers.generators.tex(specification) full = spec end for name in directory(full) do - if not weird:match(name) then + if not lpegmatch(weird,name) then + -- if lpegmatch(l_normal,name) then local mode = attributes(full..name,'mode') if mode == 'file' then if path then @@ -4933,7 +5750,7 @@ function resolvers.generators.tex(specification) end end action() - if trace_verbose then + if trace_locating then logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r) end end @@ -4948,11 +5765,48 @@ end -- we join them and split them after the expansion has taken place. This -- is more convenient. 
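-- A small illustration of the splitting described above (a sketch only: the
-- real implementation is the cached split_kpse_path added below, whose lpeg
-- splitter also picks ";" versus ":;" depending on os.type). Plain string
-- matching is used here, entries like "{unset}" are skipped, and the helper
-- name is hypothetical:
--
--~ local function naive_split_kpse_path(str) -- hypothetical name, for illustration only
--~     local found = { }
--~     for s in string.gmatch(str,"[^;:]+") do -- split on either separator
--~         if not string.find(s,"^{*unset}*") then
--~             found[#found+1] = s
--~         end
--~     end
--~     return found
--~ end
--~
--~ -- naive_split_kpse_path(".;$TEXMF/tex//;/usr/share/texmf//")
--~ --   returns { ".", "$TEXMF/tex//", "/usr/share/texmf//" }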
+--~ local checkedsplit = string.checkedsplit + +local cache = { } + +local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;"))) + +local function split_kpse_path(str) -- beware, this can be either a path or a {specification} + local found = cache[str] + if not found then + if str == "" then + found = { } + else + str = gsub(str,"\\","/") +--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator) +local split = lpegmatch(splitter,str) + found = { } + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + found[#found+1] = s + end + end + if trace_expansions then + logs.report("fileio","splitting path specification '%s'",str) + for k=1,#found do + logs.report("fileio","% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found +end + +resolvers.split_kpse_path = split_kpse_path + function resolvers.splitconfig() - for i,c in ipairs(instance) do - for k,v in pairs(c) do + for i=1,#instance do + local c = instance[i] + for k,v in next, c do if type(v) == 'string' then - local t = file.split_path(v) + local t = split_kpse_path(v) if #t > 1 then c[k] = t end @@ -4962,21 +5816,25 @@ function resolvers.splitconfig() end function resolvers.joinconfig() - for i,c in ipairs(instance.order) do - for k,v in pairs(c) do -- ipairs? + local order = instance.order + for i=1,#order do + local c = order[i] + for k,v in next, c do -- indexed? if type(v) == 'table' then c[k] = file.join_path(v) end end end end + function resolvers.split_path(str) if type(str) == 'table' then return str else - return file.split_path(str) + return split_kpse_path(str) end end + function resolvers.join_path(str) if type(str) == 'table' then return file.join_path(str) @@ -4988,8 +5846,9 @@ end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h = { }, { } - for _,vv in ipairs(file.split_path(v)) do + local t, h, p = { }, { }, split_kpse_path(v) + for kk=1,#p do + local vv = p[kk] if vv ~= "" and not h[vv] then t[#t+1] = vv h[vv] = true @@ -5036,11 +5895,15 @@ function resolvers.serialize(files) end t[#t+1] = "return {" if instance.sortdata then - for _, k in pairs(sortedkeys(files)) do -- ipairs + local sortedfiles = sortedkeys(files) + for i=1,#sortedfiles do + local k = sortedfiles[i] local fk = files[k] if type(fk) == 'table' then t[#t+1] = "\t['" .. k .. "']={" - for _, kk in pairs(sortedkeys(fk)) do -- ipairs + local sortedfk = sortedkeys(fk) + for j=1,#sortedfk do + local kk = sortedfk[j] t[#t+1] = dump(kk,fk[kk],"\t\t") end t[#t+1] = "\t}," @@ -5065,12 +5928,18 @@ function resolvers.serialize(files) return concat(t,"\n") end +local data_state = { } + +function resolvers.data_state() + return data_state or { } +end + function resolvers.save_data(dataname, makename) -- untested without cache overload for cachename, files in next, instance[dataname] do local name = (makename or file.join)(cachename,dataname) local luaname, lucname = name .. ".lua", name .. 
".luc" - if trace_verbose then - logs.report("fileio","preparing %s for %s",dataname,cachename) + if trace_locating then + logs.report("fileio","preparing '%s' for '%s'",dataname,cachename) end for k, v in next, files do if type(v) == "table" and #v == 1 then @@ -5084,24 +5953,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = files, + uuid = os.uuid(), } local ok = io.savedata(luaname,resolvers.serialize(data)) if ok then - if trace_verbose then - logs.report("fileio","%s saved in %s",dataname,luaname) + if trace_locating then + logs.report("fileio","'%s' saved in '%s'",dataname,luaname) end if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip - if trace_verbose then - logs.report("fileio","%s compiled to %s",dataname,lucname) + if trace_locating then + logs.report("fileio","'%s' compiled to '%s'",dataname,lucname) end else - if trace_verbose then - logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname) + if trace_locating then + logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end - elseif trace_verbose then - logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname) + elseif trace_locating then + logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname) end end end @@ -5113,19 +5983,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi if blob then local data = blob() if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then - if trace_verbose then - logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename) + data_state[#data_state+1] = data.uuid + if trace_locating then + logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = data.content else - if trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + if trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + elseif trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end end @@ -5144,15 +6015,17 @@ function resolvers.resetconfig() end function resolvers.loadnewconfig() - for _, cnf in ipairs(instance.luafiles) do + local luafiles = instance.luafiles + for i=1,#luafiles do + local cnf = luafiles[i] local pathname = file.dirname(cnf) local filename = file.join(pathname,resolvers.luaname) local blob = loadfile(filename) if blob then local data = blob() if data then - if trace_verbose then - logs.report("fileio","loading configuration file %s",filename) + if trace_locating then + logs.report("fileio","loading configuration file '%s'",filename) end if true then -- flatten to variable.progname @@ -5173,14 +6046,14 @@ function resolvers.loadnewconfig() instance['setup'][pathname] = data end else - if trace_verbose then - logs.report("fileio","skipping configuration file %s",filename) + if trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance['setup'][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping 
configuration file %s",filename) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance.order[#instance.order+1] = instance.setup[pathname] if instance.loaderror then break end @@ -5189,7 +6062,9 @@ end function resolvers.loadoldconfig() if not instance.renewcache then - for _, cnf in ipairs(instance.cnffiles) do + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + local cnf = cnffiles[i] local dname = file.dirname(cnf) resolvers.load_data(dname,'configuration') instance.order[#instance.order+1] = instance.configuration[dname] @@ -5379,7 +6254,7 @@ end function resolvers.expanded_path_list(str) if not str then - return ep or { } + return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") @@ -5397,9 +6272,9 @@ end function resolvers.expanded_path_list_from_var(str) -- brrr local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(str) - else return resolvers.expanded_path_list(tmp) + else + return resolvers.expanded_path_list(str) end end @@ -5446,9 +6321,9 @@ function resolvers.isreadable.file(name) local readable = lfs.isfile(name) -- brrr if trace_detail then if readable then - logs.report("fileio","+ readable: %s",name) + logs.report("fileio","file '%s' is readable",name) else - logs.report("fileio","- readable: %s", name) + logs.report("fileio","file '%s' is not readable", name) end end return readable @@ -5464,7 +6339,7 @@ local function collect_files(names) for k=1,#names do local fname = names[k] if trace_detail then - logs.report("fileio","? blobpath asked: %s",fname) + logs.report("fileio","checking name '%s'",fname) end local bname = file.basename(fname) local dname = file.dirname(fname) @@ -5480,7 +6355,7 @@ local function collect_files(names) local files = blobpath and instance.files[blobpath] if files then if trace_detail then - logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname) + logs.report("fileio","deep checking '%s' (%s)",blobpath,bname) end local blobfile = files[bname] if not blobfile then @@ -5514,7 +6389,7 @@ local function collect_files(names) end end elseif trace_locating then - logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname) + logs.report("fileio","no match in '%s' (%s)",blobpath,bname) end end end @@ -5564,14 +6439,13 @@ end local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc) local result = collected or { } local stamp = nil - filename = file.collapse_path(filename) -- elsewhere - filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere + filename = file.collapse_path(filename) -- speed up / beware: format problem if instance.remember then stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format if instance.found[stamp] then if trace_locating then - logs.report("fileio",'! 
remembered: %s',filename) + logs.report("fileio","remembering file '%s'",filename) end return instance.found[stamp] end @@ -5579,7 +6453,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not dangerous[instance.format or "?"] then if resolvers.isreadable.file(filename) then if trace_detail then - logs.report("fileio",'= found directly: %s',filename) + logs.report("fileio","file '%s' found directly",filename) end instance.found[stamp] = { filename } return { filename } @@ -5587,13 +6461,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end if find(filename,'%*') then if trace_locating then - logs.report("fileio",'! wildcard: %s', filename) + logs.report("fileio","checking wildcard '%s'", filename) end result = resolvers.find_wildcard_files(filename) elseif file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then - logs.report("fileio",'! qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end result = { filename } else @@ -5603,7 +6477,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. ".tex" if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing standard filetype: tex') + logs.report("fileio","no suffix, forcing standard filetype 'tex'") end result, ok = { forcedname }, true end @@ -5613,7 +6487,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. "." .. s if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing format filetype: %s', s) + logs.report("fileio","no suffix, forcing format filetype '%s'", s) end result, ok = { forcedname }, true break @@ -5625,7 +6499,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- try to find in tree (no suffix manipulation), here we search for the -- matching last part of the name local basename = file.basename(filename) - local pattern = (filename .. "$"):gsub("([%.%-])","%%%1") + local pattern = gsub(filename .. 
"$","([%.%-])","%%%1") local savedformat = instance.format local format = savedformat or "" if format == "" then @@ -5635,19 +6509,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan instance.format = "othertextfiles" -- kind of everything, maybe texinput is better end -- - local resolved = collect_instance_files(basename) - if #result == 0 then - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered) + if basename ~= filename then + local resolved = collect_instance_files(basename) + if #result == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered) + end end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if rr:find(pattern) then - result[#result+1], ok = rr, true + resolvers.format = savedformat + -- + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1], ok = rr, true + end end end -- a real wildcard: @@ -5656,14 +6532,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- local filelist = collect_files({basename}) -- for f=1,#filelist do -- local ff = filelist[f][3] or "" - -- if ff:find(pattern) then + -- if find(ff,pattern) then -- result[#result+1], ok = ff, true -- end -- end -- end end if not ok and trace_locating then - logs.report("fileio",'? qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end end else @@ -5682,12 +6558,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan wantedfiles[#wantedfiles+1] = forcedname filetype = resolvers.format_of_suffix(forcedname) if trace_locating then - logs.report("fileio",'! forcing filetype: %s',filetype) + logs.report("fileio","forcing filetype '%s'",filetype) end else filetype = resolvers.format_of_suffix(filename) if trace_locating then - logs.report("fileio",'! using suffix based filetype: %s',filetype) + logs.report("fileio","using suffix based filetype '%s'",filetype) end end else @@ -5699,7 +6575,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end filetype = instance.format if trace_locating then - logs.report("fileio",'! using given filetype: %s',filetype) + logs.report("fileio","using given filetype '%s'",filetype) end end local typespec = resolvers.variable_of_format(filetype) @@ -5707,9 +6583,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then - logs.report("fileio",'? filename: %s',filename) - logs.report("fileio",'? filetype: %s',filetype or '?') - logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | ")) + logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end for k=1,#wantedfiles do local fname = wantedfiles[k] @@ -5730,36 +6604,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan else -- list search local filelist = collect_files(wantedfiles) - local doscan, recurse + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = file.dirname(filelist[i][2]) .. "/" + end + end if trace_detail then - logs.report("fileio",'? filename: %s',filename) + logs.report("fileio","checking filename '%s'",filename) end -- a bit messy ... 
esp the doscan setting here + local doscan for k=1,#pathlist do local path = pathlist[k] if find(path,"^!!") then doscan = false else doscan = true end - if find(path,"//$") then recurse = true else recurse = false end local pathname = gsub(path,"^!+", '') done = false -- using file list - if filelist and not (done and not instance.allresults) and recurse then - -- compare list entries with permitted pattern - pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences - pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname - pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless - local expr = "^" .. pathname + if filelist then + local expression + -- compare list entries with permitted pattern -- /xx /xx// + if not find(pathname,"/$") then + expression = pathname .. "/" + else + expression = pathname + end + expression = gsub(expression,"([%-%.])","%%%1") -- this also influences + expression = gsub(expression,"//+$", '/.*') -- later usage of pathname + expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless + expression = "^" .. expression .. "$" + if trace_detail then + logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname) + end for k=1,#filelist do local fl = filelist[k] local f = fl[2] - if find(f,expr) then - if trace_detail then - logs.report("fileio",'= found in hash: %s',f) - end + local d = dirlist[k] + if find(d,expression) then --- todo, test for readable result[#result+1] = fl[3] resolvers.register_in_trees(f) -- for tracing used files done = true - if not instance.allresults then break end + if instance.allresults then + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d) + end + else + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d) + end + break + end + elseif trace_detail then + logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d) end end end @@ -5775,7 +6672,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local fname = file.join(ppname,w) if resolvers.isreadable.file(fname) then if trace_detail then - logs.report("fileio",'= found by scanning: %s',fname) + logs.report("fileio","found '%s' by scanning",fname) end result[#result+1] = fname done = true @@ -5838,7 +6735,7 @@ function resolvers.find_given_files(filename) local hashes = instance.hashes for k=1,#hashes do local hash = hashes[k] - local files = instance.files[hash.tag] + local files = instance.files[hash.tag] or { } local blist = files[bname] if not blist then local rname = "remap:"..bname @@ -5948,9 +6845,9 @@ function resolvers.load(option) statistics.starttiming(instance) resolvers.resetconfig() resolvers.identify_cnf() - resolvers.load_lua() + resolvers.load_lua() -- will become the new method resolvers.expand_variables() - resolvers.load_cnf() + resolvers.load_cnf() -- will be skipped when we have a lua file resolvers.expand_variables() if option ~= "nofiles" then resolvers.load_hash() @@ -5962,22 +6859,23 @@ end function resolvers.for_files(command, files, filetype, mustexist) if files and #files > 0 then local function report(str) - if trace_verbose then + if trace_locating then logs.report("fileio",str) -- has already verbose else print(str) end end - if trace_verbose then - report('') + if trace_locating then + report('') -- ? 
end - for _, file in ipairs(files) do + for f=1,#files do + local file = files[f] local result = command(file,filetype,mustexist) if type(result) == 'string' then report(result) else - for _,v in ipairs(result) do - report(v) + for i=1,#result do + report(result[i]) -- could be unpack end end end @@ -6024,18 +6922,19 @@ end function table.sequenced(t,sep) -- temp here local s = { } - for k, v in pairs(t) do -- pairs? - s[#s+1] = k .. "=" .. v + for k, v in next, t do -- indexed? + s[#s+1] = k .. "=" .. tostring(v) end return concat(s, sep or " | ") end function resolvers.methodhandler(what, filename, filetype) -- ... + filename = file.collapse_path(filename) local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb local scheme = specification.scheme if resolvers[what][scheme] then if trace_locating then - logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification)) + logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolvers[what][scheme](filename,filetype) -- todo: specification else @@ -6055,8 +6954,9 @@ function resolvers.clean_path(str) end function resolvers.do_with_path(name,func) - for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs? - func("^"..resolvers.clean_path(v)) + local pathlist = resolvers.expanded_path_list(name) + for i=1,#pathlist do + func("^"..resolvers.clean_path(pathlist[i])) end end @@ -6065,7 +6965,9 @@ function resolvers.do_with_var(name,func) end function resolvers.with_files(pattern,handle) - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for i=1,#hashes do + local hash = hashes[i] local blobpath = hash.tag local blobtype = hash.type if blobpath then @@ -6080,7 +6982,7 @@ function resolvers.with_files(pattern,handle) if type(v) == "string" then handle(blobtype,blobpath,v,k) else - for _,vv in pairs(v) do -- ipairs? + for _,vv in next, v do -- indexed handle(blobtype,blobpath,vv,k) end end @@ -6092,7 +6994,7 @@ function resolvers.with_files(pattern,handle) end function resolvers.locate_format(name) - local barename, fmtname = name:gsub("%.%a+$",""), "" + local barename, fmtname = gsub(name,"%.%a+$",""), "" if resolvers.usecache then local path = file.join(caches.setpath("formats")) -- maybe platform fmtname = file.join(path,barename..".fmt") or "" @@ -6140,7 +7042,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6164,7 +7066,7 @@ luatools with a recache feature.

local format, lower, gsub = string.format, string.lower, string.gsub -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet caches = caches or { } @@ -6251,7 +7153,8 @@ function caches.setpath(...) caches.path = '.' end caches.path = resolvers.clean_path(caches.path) - if not table.is_empty({...}) then + local dirs = { ... } + if #dirs > 0 then local pth = dir.mkdirs(caches.path,...) return pth end @@ -6297,6 +7200,7 @@ function caches.savedata(filepath,filename,data,raw) if raw then reduce, simplify = false, false end + data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex else @@ -6322,7 +7226,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6343,7 +7247,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-out'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6359,7 +7263,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6370,8 +7274,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) --[[ldx--

Once we found ourselves defining similar cache constructs @@ -6435,7 +7337,7 @@ end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] - return storage and not table.is_empty(storage) and storage.cache_version == container.version + return storage and storage.cache_version == container.version else return false end @@ -6487,16 +7389,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, lower, gsub = string.format, string.lower, string.gsub +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -- since we want to use the cache instead of the tree, we will now -- reimplement the saver. @@ -6540,19 +7441,20 @@ resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) - if table.is_empty(mountpaths) and usecache then + if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = { caches.setpath("mount") } end - if not table.is_empty(mountpaths) then + if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) - for k, root in pairs(mountpaths) do + for k=1,#mountpaths do + local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then - if line:find("^[%%#%-]") then -- or %W + if find(line,"^[%%#%-]") then -- or %W -- skip - elseif line:find("^zip://") then + elseif find(line,"^zip://") then if trace_locating then logs.report("fileio","mounting %s",line) end @@ -6597,11 +7499,13 @@ function statistics.check_fmt_status(texname) local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") - if luv.enginebanner and luv.enginebanner ~= enginebanner then - return "engine mismatch" + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner) end - if luv.sourcehash and luv.sourcehash ~= sourcehash then - return "source mismatch" + local luvhash = luv.sourcehash or "?" 
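        -- For reference, a rough sketch (hypothetical values, illustration only) of the
        -- kind of table such a companion .luv status file is expected to return; only the
        -- fields used by this check (sourcefile, enginebanner, sourcehash) are shown:
        --
        --~ return {
        --~     sourcefile   = "cont-en.mkiv",                -- hypothetical name
        --~     enginebanner = "This is LuaTeX, Version ...", -- banner of the engine that built the format
        --~     sourcehash   = "9a34c...",                    -- hypothetical md5.hex of the source file
        --~ }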
+ if luvhash ~= sourcehash then + return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash) end else return "invalid status file" @@ -6727,7 +7631,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6735,47 +7639,47 @@ if not modules then modules = { } end modules ['data-aux'] = { local find = string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") local oldscript = resolvers.clean_path(oldname) - if trace_verbose then + if trace_locating then logs.report("fileio","to be replaced old script %s", oldscript) end local newscripts = resolvers.find_files(newname) or { } if #newscripts == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","unable to locate new script") end else for i=1,#newscripts do local newscript = resolvers.clean_path(newscripts[i]) - if trace_verbose then + if trace_locating then logs.report("fileio","checking new script %s", newscript) end if oldscript == newscript then - if trace_verbose then + if trace_locating then logs.report("fileio","old and new script are the same") end elseif not find(newscript,scriptpath) then - if trace_verbose then + if trace_locating then logs.report("fileio","new script should come from %s",scriptpath) end elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_verbose then + if trace_locating then logs.report("fileio","invalid new script name") end else local newdata = io.loaddata(newscript) if newdata then - if trace_verbose then + if trace_locating then logs.report("fileio","old script content replaced by new content") end io.savedata(oldscript,newdata) break - elseif trace_verbose then + elseif trace_locating then logs.report("fileio","unable to load new script") end end @@ -6790,7 +7694,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-lst'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6814,7 +7718,9 @@ local function list(list,report) local instance = resolvers.instance local pat = upper(pattern or "","") local report = report or texio.write_nl - for _,key in pairs(table.sortedkeys(list)) do + local sorted = table.sortedkeys(list) + for i=1,#sorted do + local key = sorted[i] if instance.pattern == "" or find(upper(key),pat) then if instance.kpseonly then if instance.kpsevars[key] then @@ -6833,11 +7739,14 @@ function resolvers.listers.expansions() list(resolvers.instance.expansions) end function resolvers.listers.configurations(report) local report = report or texio.write_nl local instance = resolvers.instance - for _,key in ipairs(table.sortedkeys(instance.kpsevars)) do + local sorted = table.sortedkeys(instance.kpsevars) + for 
i=1,#sorted do + local key = sorted[i] if not instance.pattern or (instance.pattern=="") or find(key,instance.pattern) then report(format("%s\n",key)) - for i,c in ipairs(instance.order) do - local str = c[key] + local order = instance.order + for i=1,#order do + local str = order[i][key] if str then report(format("\t%s\t%s",i,str)) end @@ -6943,7 +7852,7 @@ if not resolvers then os.exit() end -logs.setprogram('LuaTools',"TDS Management Tool 1.31",environment.arguments["verbose"] or false) +logs.setprogram('LuaTools',"TDS Management Tool 1.32",environment.arguments["verbose"] or false) local instance = resolvers.reset() @@ -7000,6 +7909,12 @@ end if environment.arguments["trace"] then resolvers.settrace(environment.arguments["trace"]) end +local trackspec = environment.argument("trackers") or environment.argument("track") + +if trackspec then + trackers.enable(trackspec) +end + runners = runners or { } messages = messages or { } @@ -7033,6 +7948,7 @@ messages.help = [[ --engine=str target engine --progname=str format or backend --pattern=str filter variables +--trackers=list enable given trackers ]] function runners.make_format(texname) @@ -7091,8 +8007,9 @@ function runners.make_format(texname) logs.simple("using uncompiled initialization file: %s",luaname) end else - for _, v in pairs({instance.luaname, instance.progname, barename}) do - v = string.gsub(v..".lua","%.lua%.lua$",".lua") + local what = { instance.luaname, instance.progname, barename } + for k=1,#what do + local v = string.gsub(what[k]..".lua","%.lua%.lua$",".lua") if v and (v ~= "") then luaname = resolvers.find_files(v)[1] or "" if luaname ~= "" then @@ -7116,7 +8033,8 @@ function runners.make_format(texname) logs.simple("using lua initialization file: %s",luaname) local mp = dir.glob(file.removesuffix(file.basename(luaname)).."-*.mem") if mp and #mp > 0 then - for _, name in ipairs(mp) do + for i=1,#mp do + local name = mp[i] logs.simple("removing related mplib format %s", file.basename(name)) os.remove(name) end diff --git a/Master/texmf-dist/scripts/context/stubs/unix/makempy b/Master/texmf-dist/scripts/context/stubs/unix/makempy deleted file mode 100755 index 34892b28460..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/makempy +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute makempy.pl "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mpstools b/Master/texmf-dist/scripts/context/stubs/unix/mpstools deleted file mode 100755 index 1a64d90b03f..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/mpstools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute mpstools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mptopdf b/Master/texmf-dist/scripts/context/stubs/unix/mptopdf deleted file mode 100755 index f57a8b7a792..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/mptopdf +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute mptopdf.pl "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun index 82d1edecbc5..b99327692d7 100644 --- a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun +++ b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun @@ -48,13 +48,16 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-string'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = 
"PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local sub, gsub, find, match, gmatch, format, char, byte, rep = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep +local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch = lpeg.match + +-- some functions may disappear as they are not used anywhere if not string.split then @@ -94,8 +97,16 @@ function string:unquote() return (gsub(self,"^([\"\'])(.*)%1$","%2")) end +--~ function string:unquote() +--~ if find(self,"^[\'\"]") then +--~ return sub(self,2,-2) +--~ else +--~ return self +--~ end +--~ end + function string:quote() -- we could use format("%q") - return '"' .. self:unquote() .. '"' + return format("%q",self) end function string:count(pattern) -- variant 3 @@ -115,12 +126,23 @@ function string:limit(n,sentinel) end end -function string:strip() - return (gsub(self,"^%s*(.-)%s*$", "%1")) +--~ function string:strip() -- the .- is quite efficient +--~ -- return match(self,"^%s*(.-)%s*$") or "" +--~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list +--~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)') +--~ end + +do -- roberto's variant: + local space = lpeg.S(" \t\v\n") + local nospace = 1 - space + local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0) + function string.strip(str) + return lpegmatch(stripper,str) or "" + end end function string:is_empty() - return not find(find,"%S") + return not find(self,"%S") end function string:enhance(pattern,action) @@ -154,14 +176,14 @@ if not string.characters then local function nextchar(str, index) index = index + 1 - return (index <= #str) and index or nil, str:sub(index,index) + return (index <= #str) and index or nil, sub(str,index,index) end function string:characters() return nextchar, self, 0 end local function nextbyte(str, index) index = index + 1 - return (index <= #str) and index or nil, byte(str:sub(index,index)) + return (index <= #str) and index or nil, byte(sub(str,index,index)) end function string:bytes() return nextbyte, self, 0 @@ -174,7 +196,7 @@ end function string:rpadd(n,chr) local m = n-#self if m > 0 then - return self .. self.rep(chr or " ",m) + return self .. rep(chr or " ",m) else return self end @@ -183,7 +205,7 @@ end function string:lpadd(n,chr) local m = n-#self if m > 0 then - return self.rep(chr or " ",m) .. self + return rep(chr or " ",m) .. 
self else return self end @@ -231,6 +253,17 @@ function string:pattesc() return (gsub(self,".",patterns_escapes)) end +local simple_escapes = { + ["-"] = "%-", + ["."] = "%.", + ["?"] = ".", + ["*"] = ".*", +} + +function string:simpleesc() + return (gsub(self,".",simple_escapes)) +end + function string:tohash() local t = { } for s in gmatch(self,"([^, ]+)") do -- lpeg @@ -242,10 +275,10 @@ end local pattern = lpeg.Ct(lpeg.C(1)^0) function string:totable() - return pattern:match(self) + return lpegmatch(pattern,self) end ---~ for _, str in ipairs { +--~ local t = { --~ "1234567123456712345671234567", --~ "a\tb\tc", --~ "aa\tbb\tcc", @@ -253,7 +286,10 @@ end --~ "aaaa\tbbbb\tcccc", --~ "aaaaa\tbbbbb\tccccc", --~ "aaaaaa\tbbbbbb\tcccccc", ---~ } do print(string.tabtospace(str)) end +--~ } +--~ for k,v do +--~ print(string.tabtospace(t[k])) +--~ end function string.tabtospace(str,tab) -- we don't handle embedded newlines @@ -261,7 +297,7 @@ function string.tabtospace(str,tab) local s = find(str,"\t") if s then if not tab then tab = 7 end -- only when found - local d = tab-(s-1)%tab + local d = tab-(s-1) % tab if d > 0 then str = gsub(str,"\t",rep(" ",d),1) else @@ -280,6 +316,25 @@ function string:compactlong() -- strips newlines and leading spaces return self end +function string:striplong() -- strips newlines and leading spaces + self = gsub(self,"^%s*","") + self = gsub(self,"[\n\r]+ *","\n") + return self +end + +function string:topattern(lowercase,strict) + if lowercase then + self = lower(self) + end + self = gsub(self,".",simple_escapes) + if self == "" then + self = ".*" + elseif strict then + self = "^" .. self .. "$" + end + return self +end + end -- of closure @@ -287,58 +342,64 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-lpeg'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local P, S, Ct, C, Cs, Cc = lpeg.P, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc - ---~ l-lpeg.lua : - ---~ lpeg.digit = lpeg.R('09')^1 ---~ lpeg.sign = lpeg.S('+-')^1 ---~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1) ---~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1) ---~ lpeg.number = lpeg.float + lpeg.integer ---~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1 ---~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1 ---~ lpeg.uppercase = lpeg.P("AZ") ---~ lpeg.lowercase = lpeg.P("az") - ---~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed ---~ lpeg.space = lpeg.S(' ')^1 ---~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1 ---~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1 ---~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1 - -local hash = { } +local lpeg = require("lpeg") + +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V +local match = lpeg.match + +local digit, sign = R('09'), S('+-') +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local utf8byte = R("\128\191") + +patterns.utf8byte = utf8byte +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8byte +patterns.utf8three = R("\224\239") * utf8byte * utf8byte +patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte + +patterns.digit = digit 
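-- a small usage sketch (illustration only): the utf8 byte-range patterns defined above
-- can be combined to walk a utf-8 string one character at a time, e.g.
--
--~ local utf8char  = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
--~ local splitutf8 = lpeg.Ct(lpeg.C(utf8char)^0)
--~ print(#lpeg.match(splitutf8,"åäö")) -- 3 characters (6 bytes)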
+patterns.sign = sign +patterns.cardinal = sign^0 * digit^1 +patterns.integer = sign^0 * digit^1 +patterns.float = sign^0 * digit^0 * P('.') * digit^1 +patterns.number = patterns.float + patterns.integer +patterns.oct = P("0") * R("07")^1 +patterns.octal = patterns.oct +patterns.HEX = P("0x") * R("09","AF")^1 +patterns.hex = P("0x") * R("09","af")^1 +patterns.hexadecimal = P("0x") * R("09","AF","af")^1 +patterns.lowercase = R("az") +patterns.uppercase = R("AZ") +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = S(" ") +patterns.eol = S("\n\r") +patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) +patterns.newline = crlf + cr + lf +patterns.nonspace = 1 - patterns.space +patterns.nonspacer = 1 - patterns.spacer +patterns.whitespace = patterns.eol + patterns.spacer +patterns.nonwhitespace = 1 - patterns.whitespace +patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * lpeg.V(1) } -end - -function lpeg.startswith(pattern) --slightly adapted - return P(pattern) + return P { P(pattern) + 1 * V(1) } -- why so complex? end function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end --- variant: - ---~ local parser = lpeg.Ct(lpeg.splitat(newline)) - -local crlf = P("\r\n") -local cr = P("\r") -local lf = P("\n") -local space = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -local newline = crlf + cr + lf -local spacing = space^0 * newline - +local spacing = patterns.spacer^0 * patterns.newline -- sort of strip local empty = spacing * Cc("") local nonempty = Cs((1-spacing)^1) * spacing^-1 local content = (empty + nonempty)^1 @@ -346,15 +407,15 @@ local content = (empty + nonempty)^1 local capture = Ct(content^0) function string:splitlines() - return capture:match(self) + return match(capture,self) end -lpeg.linebyline = content -- better make a sublibrary +patterns.textline = content ---~ local p = lpeg.splitat("->",false) print(p:match("oeps->what->more")) -- oeps what more ---~ local p = lpeg.splitat("->",true) print(p:match("oeps->what->more")) -- oeps what->more ---~ local p = lpeg.splitat("->",false) print(p:match("oeps")) -- oeps ---~ local p = lpeg.splitat("->",true) print(p:match("oeps")) -- oeps +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more +--~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps +--~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps local splitters_s, splitters_m = { }, { } @@ -364,7 +425,7 @@ local function splitat(separator,single) separator = P(separator) if single then local other, any = C((1 - separator)^0), P(1) - splitter = other * (separator * C(any^0) + "") + splitter = other * (separator * C(any^0) + "") -- ? 
splitters_s[separator] = splitter else local other = C((1 - separator)^0) @@ -379,15 +440,72 @@ lpeg.splitat = splitat local cache = { } +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = Ct(splitat(separator)) + cache[separator] = c + end + return match(c,str) +end + function string:split(separator) local c = cache[separator] if not c then c = Ct(splitat(separator)) cache[separator] = c end - return c:match(self) + return match(c,self) +end + +lpeg.splitters = cache + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,str) +end + +function string:checkedsplit(separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^0) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return match(c,self) end +--~ function lpeg.append(list,pp) +--~ local p = pp +--~ for l=1,#list do +--~ if p then +--~ p = p + P(list[l]) +--~ else +--~ p = P(list[l]) +--~ end +--~ end +--~ return p +--~ end + +--~ from roberto's site: + +local f1 = string.byte + +local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + end -- of closure @@ -395,7 +513,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-table'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -404,9 +522,58 @@ if not modules then modules = { } end modules ['l-table'] = { table.join = table.concat local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove -local format, find, gsub, lower, dump = string.format, string.find, string.gsub, string.lower, string.dump +local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match local getmetatable, setmetatable = getmetatable, setmetatable -local type, next, tostring, ipairs = type, next, tostring, ipairs +local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs + +-- Starting with version 5.2 Lua no longer provide ipairs, which makes +-- sense. As we already used the for loop and # in most places the +-- impact on ConTeXt was not that large; the remaining ipairs already +-- have been replaced. In a similar fashio we also hardly used pairs. +-- +-- Just in case, we provide the fallbacks as discussed in Programming +-- in Lua (http://www.lua.org/pil/7.3.html): + +if not ipairs then + + -- for k, v in ipairs(t) do ... end + -- for k=1,#t do local v = t[k] ... end + + local function iterate(a,i) + i = i + 1 + local v = a[i] + if v ~= nil then + return i, v --, nil + end + end + + function ipairs(a) + return iterate, a, 0 + end + +end + +if not pairs then + + -- for k, v in pairs(t) do ... end + -- for k, v in next, t do ... 
end + + function pairs(t) + return next, t -- , nil + end + +end + +-- Also, unpack has been moved to the table table, and for compatiility +-- reasons we provide both now. + +if not table.unpack then + table.unpack = _G.unpack +elseif not unpack then + _G.unpack = table.unpack +end + +-- extra functions, some might go (when not used) function table.strip(tab) local lst = { } @@ -421,6 +588,14 @@ function table.strip(tab) return lst end +function table.keys(t) + local k = { } + for key, _ in next, t do + k[#k+1] = key + end + return k +end + local function compare(a,b) return (tostring(a) < tostring(b)) end @@ -464,7 +639,7 @@ end table.sortedkeys = sortedkeys table.sortedhashkeys = sortedhashkeys -function table.sortedpairs(t) +function table.sortedhash(t) local s = sortedhashkeys(t) -- maybe just sortedkeys local n = 0 local function kv(s) @@ -475,6 +650,8 @@ function table.sortedpairs(t) return kv, s end +table.sortedpairs = table.sortedhash + function table.append(t, list) for _,v in next, list do insert(t,v) @@ -583,7 +760,7 @@ end table.fastcopy = fastcopy table.copy = copy --- rougly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) +-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) function table.sub(t,i,j) return { unpack(t,i,j) } @@ -597,18 +774,18 @@ end -- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) -function table.is_empty(t) +function table.is_empty(t) -- obolete, use inline code instead return not t or not next(t) end -function table.one_entry(t) +function table.one_entry(t) -- obolete, use inline code instead local n = next(t) return n and not next(t,n) end -function table.starts_at(t) - return ipairs(t,1)(t,0) -end +--~ function table.starts_at(t) -- obsolete, not nice anyway +--~ return ipairs(t,1)(t,0) +--~ end function table.tohash(t,value) local h = { } @@ -686,6 +863,8 @@ end -- -- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) +-- problem: there no good number_to_string converter with the best resolution + local function do_serialize(root,name,depth,level,indexed) if level > 0 then depth = depth .. " " @@ -708,8 +887,9 @@ local function do_serialize(root,name,depth,level,indexed) handle(format("%s{",depth)) end end + -- we could check for k (index) being number (cardinal) if root and next(root) then - local first, last = nil, 0 -- #root cannot be trusted here + local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) if compact then -- NOT: for k=1,#root do (we need to quit at nil) for k,v in ipairs(root) do -- can we use next? 
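-- a quick usage sketch (illustration only) of the sortedhash (alias sortedpairs) iterator
-- defined above: it visits a hash table in sorted key order,
--
--~ for k, v in table.sortedhash { beta = 2, alpha = 1 } do
--~     print(k,v) -- alpha 1, then beta 2
--~ end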
@@ -730,10 +910,10 @@ local function do_serialize(root,name,depth,level,indexed) if hexify then handle(format("%s 0x%04X,",depth,v)) else - handle(format("%s %s,",depth,v)) + handle(format("%s %s,",depth,v)) -- %.99g end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then handle(format("%s %s,",depth,v)) else handle(format("%s %q,",depth,v)) @@ -770,29 +950,29 @@ local function do_serialize(root,name,depth,level,indexed) --~ if hexify then --~ handle(format("%s %s=0x%04X,",depth,key(k),v)) --~ else - --~ handle(format("%s %s=%s,",depth,key(k),v)) + --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g --~ end if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then handle(format("%s [0x%04X]=0x%04X,",depth,k,v)) else - handle(format("%s [%s]=%s,",depth,k,v)) + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g end elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then if hexify then handle(format("%s %s=0x%04X,",depth,k,v)) else - handle(format("%s %s=%s,",depth,k,v)) + handle(format("%s %s=%s,",depth,k,v)) -- %.99g end else if hexify then handle(format("%s [%q]=0x%04X,",depth,k,v)) else - handle(format("%s [%q]=%s,",depth,k,v)) + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g end end elseif t == "string" then - if reduce and (find(v,"^[%-%+]?[%d]-%.?[%d+]$") == 1) then + if reduce and tonumber(v) then --~ handle(format("%s %s=%s,",depth,key(k),v)) if type(k) == "number" then -- or find(k,"^%d+$") then if hexify then @@ -1001,7 +1181,7 @@ function table.tofile(filename,root,name,reduce,noquotes,hexify) end end -local function flatten(t,f,complete) +local function flatten(t,f,complete) -- is this used? meybe a variant with next, ... for i=1,#t do local v = t[i] if type(v) == "table" then @@ -1030,6 +1210,24 @@ end table.flatten_one_level = table.unnest +-- a better one: + +local function flattened(t,f) + if not f then + f = { } + end + for k, v in next, t do + if type(v) == "table" then + flattened(v,f) + else + f[k] = v + end + end + return f +end + +table.flattened = flattened + -- the next three may disappear function table.remove_value(t,value) -- todo: n @@ -1165,7 +1363,7 @@ function table.clone(t,p) -- t is optional or nil or table elseif not t then t = { } end - setmetatable(t, { __index = function(_,key) return p[key] end }) + setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ? 
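    -- illustration only: for plain lookups the closure above behaves the same as the
    -- simpler __index = p would, e.g.
    --
    --~ local defaults = { color = "red" }
    --~ local derived  = table.clone(nil,defaults)
    --~ print(derived.color) -- "red", looked up in the parent table via __index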
return t end @@ -1193,21 +1391,36 @@ function table.reverse(t) return tt end ---~ function table.keys(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return k ---~ end +function table.insert_before_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i,extra) + return + end + end + insert(t,1,extra) +end + +function table.insert_after_value(t,value,extra) + for i=1,#t do + if t[i] == extra then + remove(t,i) + end + end + for i=1,#t do + if t[i] == value then + insert(t,i+1,extra) + return + end + end + insert(t,#t+1,extra) +end ---~ function table.keys_as_string(t) ---~ local k = { } ---~ for k,_ in next, t do ---~ k[#k+1] = k ---~ end ---~ return concat(k,"") ---~ end end -- of closure @@ -1216,13 +1429,13 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-io'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local byte = string.byte +local byte, find, gsub = string.byte, string.find, string.gsub if string.find(os.getenv("PATH"),";") then io.fileseparator, io.pathseparator = "\\", ";" @@ -1251,7 +1464,7 @@ function io.savedata(filename,data,joiner) elseif type(data) == "function" then data(f) else - f:write(data) + f:write(data or "") end f:close() return true @@ -1380,20 +1593,21 @@ function io.ask(question,default,options) end io.write(string.format(" ")) local answer = io.read() - answer = answer:gsub("^%s*(.*)%s*$","%1") + answer = gsub(answer,"^%s*(.*)%s*$","%1") if answer == "" and default then return default elseif not options then return answer else - for _,v in pairs(options) do - if v == answer then + for k=1,#options do + if options[k] == answer then return answer end end local pattern = "^" .. 
answer - for _,v in pairs(options) do - if v:find(pattern) then + for k=1,#options do + local v = options[k] + if find(v,pattern) then return v end end @@ -1408,20 +1622,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-number'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format = string.format +local tostring = tostring +local format, floor, insert, match = string.format, math.floor, table.insert, string.match +local lpegmatch = lpeg.match number = number or { } -- a,b,c,d,e,f = number.toset(100101) function number.toset(n) - return (tostring(n)):match("(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") + return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)") end function number.toevenhex(n) @@ -1447,10 +1663,21 @@ end local one = lpeg.C(1-lpeg.S(''))^1 function number.toset(n) - return one:match(tostring(n)) + return lpegmatch(one,tostring(n)) end - +function number.bits(n,zero) + local t, i = { }, (zero and 0) or 1 + while n > 0 do + local m = n % 2 + if m > 0 then + insert(t,1,i) + end + n = floor(n/2) + i = i + 1 + end + return t +end end -- of closure @@ -1459,7 +1686,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-set'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1549,46 +1776,63 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-os'] = { version = 1.001, - comment = "companion to luat-lub.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local find = string.find +-- maybe build io.flush in os.execute + +local find, format, gsub = string.find, string.format, string.gsub +local random, ceil = math.random, math.ceil + +local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush + +function os.execute(...) ioflush() return execute(...) end +function os.spawn (...) ioflush() return spawn (...) end +function os.exec (...) ioflush() return exec (...) end function os.resultof(command) - return io.popen(command,"r"):read("*all") + ioflush() -- else messed up logging + local handle = io.popen(command,"r") + if not handle then + -- print("unknown command '".. command .. "' in os.resultof") + return "" + else + return handle:read("*all") or "" + end end -if not os.exec then os.exec = os.execute end -if not os.spawn then os.spawn = os.execute end - ---~ os.type : windows | unix (new, we already guessed os.platform) ---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new) +--~ os.type : windows | unix (new, we already guessed os.platform) +--~ os.name : windows | msdos | linux | macosx | solaris | .. 
| generic (new) +--~ os.platform : extended os.name with architecture if not io.fileseparator then if find(os.getenv("PATH"),";") then - io.fileseparator, io.pathseparator, os.platform = "\\", ";", os.type or "windows" + io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin" else - io.fileseparator, io.pathseparator, os.platform = "/" , ":", os.type or "unix" + io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix" end end -os.platform = os.platform or os.type or (io.pathseparator == ";" and "windows") or "unix" +os.type = os.type or (io.pathseparator == ";" and "windows") or "unix" +os.name = os.name or (os.type == "windows" and "mswin" ) or "linux" + +if os.type == "windows" then + os.libsuffix, os.binsuffix = 'dll', 'exe' +else + os.libsuffix, os.binsuffix = 'so', '' +end function os.launch(str) - if os.platform == "windows" then + if os.type == "windows" then os.execute("start " .. str) -- os.spawn ? else os.execute(str .. " &") -- os.spawn ? end end -if not os.setenv then - function os.setenv() return false end -end - if not os.times then -- utime = user time -- stime = system time @@ -1618,64 +1862,218 @@ end --~ print(os.date("%H:%M:%S",os.gettimeofday())) --~ print(os.date("%H:%M:%S",os.time())) -os.arch = os.arch or function() - local a = os.resultof("uname -m") or "linux" - os.arch = function() - return a +-- no need for function anymore as we have more clever code and helpers now +-- this metatable trickery might as well disappear + +os.resolvers = os.resolvers or { } + +local resolvers = os.resolvers + +local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil +local osix = osmt.__index + +osmt.__index = function(t,k) + return (resolvers[k] or osix)(t,k) +end + +setmetatable(os,osmt) + +if not os.setenv then + + -- we still store them but they won't be seen in + -- child processes although we might pass them some day + -- using command concatination + + local env, getenv = { }, os.getenv + + function os.setenv(k,v) + env[k] = v + end + + function os.getenv(k) + return env[k] or getenv(k) end - return a + end -local platform +-- we can use HOSTTYPE on some platforms -function os.currentplatform(name,default) - if not platform then - local name = os.name or os.platform or name -- os.name is built in, os.platform is mine - if not name then - platform = default or "linux" - elseif name == "windows" or name == "mswin" or name == "win32" or name == "msdos" then - if os.getenv("PROCESSOR_ARCHITECTURE") == "AMD64" then - platform = "mswin-64" - else - platform = "mswin" - end +local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or "" + +local function guess() + local architecture = os.resultof("uname -m") or "" + if architecture ~= "" then + return architecture + end + architecture = os.getenv("HOSTTYPE") or "" + if architecture ~= "" then + return architecture + end + return os.resultof("echo $HOSTTYPE") or "" +end + +if platform ~= "" then + + os.platform = platform + +elseif os.type == "windows" then + + -- we could set the variable directly, no function needed here + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or "" + if find(architecture,"AMD64") then + platform = "mswin-64" else - local architecture = os.arch() - if name == "linux" then - if find(architecture,"x86_64") then - platform = "linux-64" - elseif find(architecture,"ppc") then - platform = "linux-ppc" - else - platform = "linux" - end - elseif name 
== "macosx" then - if find(architecture,"i386") then - platform = "osx-intel" - else - platform = "osx-ppc" - end - elseif name == "sunos" then - if find(architecture,"sparc") then - platform = "solaris-sparc" - else -- if architecture == 'i86pc' - platform = "solaris-intel" - end - elseif name == "freebsd" then - if find(architecture,"amd64") then - platform = "freebsd-amd64" - else - platform = "freebsd" - end - else - platform = default or name - end + platform = "mswin" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "linux" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "linux-64" + elseif find(architecture,"ppc") then + platform = "linux-ppc" + else + platform = "linux" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "macosx" then + + --[[ + Identifying the architecture of OSX is quite a mess and this + is the best we can come up with. For some reason $HOSTTYPE is + a kind of pseudo environment variable, not known to the current + environment. And yes, uname cannot be trusted either, so there + is a change that you end up with a 32 bit run on a 64 bit system. + Also, some proper 64 bit intel macs are too cheap (low-end) and + therefore not permitted to run the 64 bit kernel. + ]]-- + + function os.resolvers.platform(t,k) + -- local platform, architecture = "", os.getenv("HOSTTYPE") or "" + -- if architecture == "" then + -- architecture = os.resultof("echo $HOSTTYPE") or "" + -- end + local platform, architecture = "", os.resultof("echo $HOSTTYPE") or "" + if architecture == "" then + -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n") + platform = "osx-intel" + elseif find(architecture,"i386") then + platform = "osx-intel" + elseif find(architecture,"x86_64") then + platform = "osx-64" + else + platform = "osx-ppc" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "sunos" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"sparc") then + platform = "solaris-sparc" + else -- if architecture == 'i86pc' + platform = "solaris-intel" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "freebsd" then + + function os.resolvers.platform(t,k) + local platform, architecture = "", os.resultof("uname -m") or "" + if find(architecture,"amd64") then + platform = "freebsd-amd64" + else + platform = "freebsd" + end + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +elseif name == "kfreebsd" then + + function os.resolvers.platform(t,k) + -- we sometims have HOSTTYPE set so let's check that first + local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or "" + if find(architecture,"x86_64") then + platform = "kfreebsd-64" + else + platform = "kfreebsd-i386" end - function os.currentplatform() - return platform + os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +else + + -- platform = "linux" + -- os.setenv("MTX_PLATFORM",platform) + -- os.platform = platform + + function os.resolvers.platform(t,k) + local platform = "linux" + 
os.setenv("MTX_PLATFORM",platform) + os.platform = platform + return platform + end + +end + +-- beware, we set the randomseed + +-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the +-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom +-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal +-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479. +-- +-- as we don't call this function too often there is not so much risk on repetition + +local t = { 8, 9, "a", "b" } + +function os.uuid() + return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x", + random(0xFFFF),random(0xFFFF), + random(0x0FFF), + t[ceil(random(4))] or 8,random(0x0FFF), + random(0xFFFF), + random(0xFFFF),random(0xFFFF),random(0xFFFF) + ) +end + +local d + +function os.timezone(delta) + d = d or tonumber(tonumber(os.date("%H")-os.date("!%H"))) + if delta then + if d > 0 then + return format("+%02i:00",d) + else + return format("-%02i:00",-d) end + else + return 1 end - return platform end @@ -1685,7 +2083,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-file'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -1696,14 +2094,17 @@ if not modules then modules = { } end modules ['l-file'] = { file = file or { } local concat = table.concat -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char +local lpegmatch = lpeg.match function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) end function file.addsuffix(filename, suffix) - if not find(filename,"%.[%a%d]+$") then + if not suffix or suffix == "" then + return filename + elseif not find(filename,"%.[%a%d]+$") then return filename .. "." .. suffix else return filename @@ -1726,20 +2127,39 @@ function file.nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$","")) end -function file.extname(name) - return match(name,"^.+%.([^/\\]-)$") or "" +function file.extname(name,default) + return match(name,"^.+%.([^/\\]-)$") or default or "" end file.suffix = file.extname ---~ print(file.join("x/","/y")) ---~ print(file.join("http://","/y")) ---~ print(file.join("http://a","/y")) ---~ print(file.join("http:///a","/y")) ---~ print(file.join("//nas-1","/y")) +--~ function file.join(...) +--~ local pth = concat({...},"/") +--~ pth = gsub(pth,"\\","/") +--~ local a, b = match(pth,"^(.*://)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ a, b = match(pth,"^(//)(.*)$") +--~ if a and b then +--~ return a .. gsub(b,"//+","/") +--~ end +--~ return (gsub(pth,"//+","/")) +--~ end + +local trick_1 = char(1) +local trick_2 = "^" .. trick_1 .. "/+" function file.join(...) - local pth = concat({...},"/") + local lst = { ... } + local a, b = lst[1], lst[2] + if a == "" then + lst[1] = trick_1 + elseif b and find(a,"^/+$") and find(b,"^/") then + lst[1] = "" + lst[2] = gsub(b,"^/+","") + end + local pth = concat(lst,"/") pth = gsub(pth,"\\","/") local a, b = match(pth,"^(.*://)(.*)$") if a and b then @@ -1749,17 +2169,28 @@ function file.join(...) 
if a and b then return a .. gsub(b,"//+","/") end + pth = gsub(pth,trick_2,"") return (gsub(pth,"//+","/")) end +--~ print(file.join("//","/y")) +--~ print(file.join("/","/y")) +--~ print(file.join("","/y")) +--~ print(file.join("/x/","/y")) +--~ print(file.join("x/","/y")) +--~ print(file.join("http://","/y")) +--~ print(file.join("http://a","/y")) +--~ print(file.join("http:///a","/y")) +--~ print(file.join("//nas-1","/y")) + function file.iswritable(name) local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,".")) - return a and a.permissions:sub(2,2) == "w" + return a and sub(a.permissions,2,2) == "w" end function file.isreadable(name) local a = lfs.attributes(name) - return a and a.permissions:sub(1,1) == "r" + return a and sub(a.permissions,1,1) == "r" end file.is_readable = file.isreadable @@ -1767,36 +2198,50 @@ file.is_writable = file.iswritable -- todo: lpeg -function file.split_path(str) - local t = { } - str = gsub(str,"\\", "/") - str = gsub(str,"(%a):([;/])", "%1\001%2") - for name in gmatch(str,"([^;:]+)") do - if name ~= "" then - t[#t+1] = gsub(name,"\001",":") - end - end - return t +--~ function file.split_path(str) +--~ local t = { } +--~ str = gsub(str,"\\", "/") +--~ str = gsub(str,"(%a):([;/])", "%1\001%2") +--~ for name in gmatch(str,"([^;:]+)") do +--~ if name ~= "" then +--~ t[#t+1] = gsub(name,"\001",":") +--~ end +--~ end +--~ return t +--~ end + +local checkedsplit = string.checkedsplit + +function file.split_path(str,separator) + str = gsub(str,"\\","/") + return checkedsplit(str,separator or io.pathseparator) end function file.join_path(tab) return concat(tab,io.pathseparator) -- can have trailing // end +-- we can hash them weakly + function file.collapse_path(str) - str = gsub(str,"/%./","/") - local n, m = 1, 1 - while n > 0 or m > 0 do - str, n = gsub(str,"[^/%.]+/%.%.$","") - str, m = gsub(str,"[^/%.]+/%.%./","") - end - str = gsub(str,"([^/])/$","%1") - str = gsub(str,"^%./","") - str = gsub(str,"/%.$","") + str = gsub(str,"\\","/") + if find(str,"/") then + str = gsub(str,"^%./",(gsub(lfs.currentdir(),"\\","/")) .. "/") -- ./xx in qualified + str = gsub(str,"/%./","/") + local n, m = 1, 1 + while n > 0 or m > 0 do + str, n = gsub(str,"[^/%.]+/%.%.$","") + str, m = gsub(str,"[^/%.]+/%.%./","") + end + str = gsub(str,"([^/])/$","%1") + -- str = gsub(str,"^%./","") -- ./xx in qualified + str = gsub(str,"/%.$","") + end if str == "" then str = "." 
end return str end +--~ print(file.collapse_path("/a")) --~ print(file.collapse_path("a/./b/..")) --~ print(file.collapse_path("a/aa/../b/bb")) --~ print(file.collapse_path("a/../..")) @@ -1826,27 +2271,27 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1 --~ function file.extname(name) ---~ return pattern:match(name) or "" +--~ return lpegmatch(pattern,name) or "" --~ end --~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1) --~ function file.removesuffix(name) ---~ return pattern:match(name) +--~ return lpegmatch(pattern,name) --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1 --~ function file.basename(name) ---~ return pattern:match(name) or name +--~ return lpegmatch(pattern,name) or name --~ end --~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1 --~ function file.dirname(name) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) +--~ return sub(name,1,p-2) --~ else --~ return "" --~ end @@ -1855,7 +2300,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.addsuffix(name, suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then --~ return name --~ else @@ -1866,9 +2311,9 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1 --~ function file.replacesuffix(name,suffix) ---~ local p = pattern:match(name) +--~ local p = lpegmatch(pattern,name) --~ if p then ---~ return name:sub(1,p-2) .. "." .. suffix +--~ return sub(name,1,p-2) .. "." .. suffix --~ else --~ return name .. "." .. suffix --~ end @@ -1877,11 +2322,11 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1 --~ function file.nameonly(name) ---~ local a, b = pattern:match(name) +--~ local a, b = lpegmatch(pattern,name) --~ if b then ---~ return name:sub(a,b-2) +--~ return sub(name,a,b-2) --~ elseif a then ---~ return name:sub(a) +--~ return sub(name,a) --~ else --~ return name --~ end @@ -1915,11 +2360,11 @@ local rootbased = lpeg.P("/") + letter*lpeg.P(":") -- ./name ../name /name c: :// name/name function file.is_qualified_path(filename) - return qualified:match(filename) + return lpegmatch(qualified,filename) ~= nil end function file.is_rootbased_path(filename) - return rootbased:match(filename) + return lpegmatch(rootbased,filename) ~= nil end local slash = lpeg.S("\\/") @@ -1932,16 +2377,25 @@ local base = lpeg.C((1-suffix)^0) local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc("")) function file.splitname(str) -- returns drive, path, base, suffix - return pattern:match(str) + return lpegmatch(pattern,str) end --- function test(t) for k, v in pairs(t) do print(v, "=>", file.splitname(v)) end end +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end -- -- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } -- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } -- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } -- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } +--~ -- todo: +--~ +--~ if os.type == "windows" then +--~ local currentdir = lfs.currentdir +--~ function lfs.currentdir() +--~ return 
(gsub(currentdir(),"\\","/")) +--~ end +--~ end + end -- of closure @@ -2006,7 +2460,7 @@ end function file.loadchecksum(name) if md5 then local data = io.loaddata(name .. ".md5") - return data and data:gsub("%s","") + return data and (gsub(data,"%s","")) end return nil end @@ -2025,19 +2479,168 @@ end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['l-dir'] = { +if not modules then modules = { } end modules ['l-url'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local char, gmatch, gsub = string.char, string.gmatch, string.gsub +local tonumber, type = tonumber, type +local lpegmatch = lpeg.match + +-- from the spec (on the web): +-- +-- foo://example.com:8042/over/there?name=ferret#nose +-- \_/ \______________/\_________/ \_________/ \__/ +-- | | | | | +-- scheme authority path query fragment +-- | _____________________|__ +-- / \ / \ +-- urn:example:animal:ferret:nose + +url = url or { } + +local function tochar(s) + return char(tonumber(s,16)) +end + +local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1) + +local hexdigit = lpeg.R("09","AF","af") +local plus = lpeg.P("+") +local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar) + +-- we assume schemes with more than 1 character (in order to avoid problems with windows disks) + +local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("") +local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("") +local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("") +local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("") +local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("") + +local parser = lpeg.Ct(scheme * authority * path * query * fragment) + +-- todo: reconsider Ct as we can as well have five return values (saves a table) +-- so we can have two parsers, one with and one without + +function url.split(str) + return (type(str) == "string" and lpegmatch(parser,str)) or str +end + +-- todo: cache them + +function url.hashed(str) + local s = url.split(str) + local somescheme = s[1] ~= "" + return { + scheme = (somescheme and s[1]) or "file", + authority = s[2], + path = s[3], + query = s[4], + fragment = s[5], + original = str, + noscheme = not somescheme, + } +end + +function url.hasscheme(str) + return url.split(str)[1] ~= "" +end + +function url.addscheme(str,scheme) + return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str) +end + +function url.construct(hash) + local fullurl = hash.sheme .. "://".. hash.authority .. hash.path + if hash.query then + fullurl = fullurl .. "?".. hash.query + end + if hash.fragment then + fullurl = fullurl .. "?".. 
hash.fragment + end + return fullurl +end + +function url.filename(filename) + local t = url.hashed(filename) + return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename +end + +function url.query(str) + if type(str) == "string" then + local t = { } + for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do + t[k] = v + end + return t + else + return str + end +end + +--~ print(url.filename("file:///c:/oeps.txt")) +--~ print(url.filename("c:/oeps.txt")) +--~ print(url.filename("file:///oeps.txt")) +--~ print(url.filename("file:///etc/test.txt")) +--~ print(url.filename("/oeps.txt")) + +--~ from the spec on the web (sort of): +--~ +--~ function test(str) +--~ print(table.serialize(url.hashed(str))) +--~ end +--~ +--~ test("%56pass%20words") +--~ test("file:///c:/oeps.txt") +--~ test("file:///c|/oeps.txt") +--~ test("file:///etc/oeps.txt") +--~ test("file://./etc/oeps.txt") +--~ test("file:////etc/oeps.txt") +--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt") +--~ test("http://www.ietf.org/rfc/rfc2396.txt") +--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what") +--~ test("mailto:John.Doe@example.com") +--~ test("news:comp.infosystems.www.servers.unix") +--~ test("tel:+1-816-555-1212") +--~ test("telnet://192.0.2.16:80/") +--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2") +--~ test("/etc/passwords") +--~ test("http://www.pragma-ade.com/spaced%20name") + +--~ test("zip:///oeps/oeps.zip#bla/bla.tex") +--~ test("zip:///oeps/oeps.zip?bla/bla.tex") + + +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['l-dir'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- dir.expand_name will be merged with cleanpath and collapsepath + local type = type -local find, gmatch = string.find, string.gmatch +local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local lpegmatch = lpeg.match dir = dir or { } +-- handy + +function dir.current() + return (gsub(lfs.currentdir(),"\\","/")) +end + -- optimizing for no string.find (*) does not save time local attributes = lfs.attributes @@ -2068,6 +2671,35 @@ end dir.glob_pattern = glob_pattern +local function collect_pattern(path,patt,recurse,result) + local ok, scanner + result = result or { } + if path == "/" then + ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe + else + ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe + end + if ok and type(scanner) == "function" then + if not find(path,"/$") then path = path .. '/' end + for name in scanner do + local full = path .. 
name + local attr = attributes(full) + local mode = attr.mode + if mode == 'file' then + if find(full,patt) then + result[name] = attr + end + elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then + attr.list = collect_pattern(full,patt,recurse) + result[name] = attr + end + end + end + return result +end + +dir.collect_pattern = collect_pattern + local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V local pattern = Ct { @@ -2087,29 +2719,48 @@ local filter = Cs ( ( )^0 ) local function glob(str,t) - if type(str) == "table" then - local t = t or { } - for s=1,#str do - glob(str[s],t) + if type(t) == "function" then + if type(str) == "table" then + for s=1,#str do + glob(str[s],t) + end + elseif lfs.isfile(str) then + t(str) + else + local split = lpegmatch(pattern,str) + if split then + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,t) + end end - return t - elseif lfs.isfile(str) then - local t = t or { } - t[#t+1] = str - return t else - local split = pattern:match(str) - if split then + if type(str) == "table" then + local t = t or { } + for s=1,#str do + glob(str[s],t) + end + return t + elseif lfs.isfile(str) then local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"%*%*") - local start = root .. path - local result = filter:match(start .. base) - glob_pattern(start,result,recurse,action) + t[#t+1] = str return t else - return { } + local split = lpegmatch(pattern,str) + if split then + local t = t or { } + local action = action or function(name) t[#t+1] = name end + local root, path, base = split[1], split[2], split[3] + local recurse = find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + glob_pattern(start,result,recurse,action) + return t + else + return { } + end end end end @@ -2171,11 +2822,12 @@ end local make_indeed = true -- false -if string.find(os.getenv("PATH"),";") then +if string.find(os.getenv("PATH"),";") then -- os.type == "windows" function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2186,13 +2838,13 @@ if string.find(os.getenv("PATH"),";") then end local first, middle, last local drive = false - first, middle, last = str:match("^(//)(//*)(.*)$") + first, middle, last = match(str,"^(//)(//*)(.*)$") if first then -- empty network path == local path else - first, last = str:match("^(//)/*(.-)$") + first, last = match(str,"^(//)/*(.-)$") if first then - middle, last = str:match("([^/]+)/+(.-)$") + middle, last = match(str,"([^/]+)/+(.-)$") if middle then pth = "//" .. middle else @@ -2200,11 +2852,11 @@ if string.find(os.getenv("PATH"),";") then last = "" end else - first, middle, last = str:match("^([a-zA-Z]:)(/*)(.-)$") + first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$") if first then pth, drive = first .. 
middle, true else - middle, last = str:match("^(/*)(.-)$") + middle, last = match(str,"^(/*)(.-)$") if not middle then last = str end @@ -2238,34 +2890,31 @@ if string.find(os.getenv("PATH"),";") then --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) - local first, nothing, last = str:match("^(//)(//*)(.*)$") + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath + local first, nothing, last = match(str,"^(//)(//*)(.*)$") if first then - first = lfs.currentdir() .. "/" - first = first:gsub("\\","/") + first = dir.current() .. "/" end if not first then - first, last = str:match("^(//)/*(.*)$") + first, last = match(str,"^(//)/*(.*)$") end if not first then - first, last = str:match("^([a-zA-Z]:)(.*)$") + first, last = match(str,"^([a-zA-Z]:)(.*)$") if first and not find(last,"^/") then local d = lfs.currentdir() if lfs.chdir(first) then - first = lfs.currentdir() - first = first:gsub("\\","/") + first = dir.current() end lfs.chdir(d) end end if not first then - first, last = lfs.currentdir(), str - first = first:gsub("\\","/") + first, last = dir.current(), str end - last = last:gsub("//","/") - last = last:gsub("/%./","/") - last = last:gsub("^/*","") - first = first:gsub("/*$","") + last = gsub(last,"//","/") + last = gsub(last,"/%./","/") + last = gsub(last,"^/*","") + first = gsub(first,"/*$","") if last == "" then return first else @@ -2276,8 +2925,9 @@ if string.find(os.getenv("PATH"),";") then else function dir.mkdirs(...) - local str, pth = "", "" - for _, s in ipairs({...}) do + local str, pth, t = "", "", { ... } + for i=1,#t do + local s = t[i] if s ~= "" then if str ~= "" then str = str .. "/" .. s @@ -2286,7 +2936,7 @@ else end end end - str = str:gsub("/+","/") + str = gsub(str,"/+","/") if find(str,"^/") then pth = "/" for s in gmatch(str,"[^/]+") do @@ -2320,12 +2970,12 @@ else --~ print(dir.mkdirs("///a/b/c")) --~ print(dir.mkdirs("a/bbb//ccc/")) - function dir.expand_name(str) + function dir.expand_name(str) -- will be merged with cleanpath and collapsepath if not find(str,"^/") then str = lfs.currentdir() .. "/" .. 
str end - str = str:gsub("//","/") - str = str:gsub("/%./","/") + str = gsub(str,"//","/") + str = gsub(str,"/%./","/") return str end @@ -2340,7 +2990,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-boolean'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2401,7 +3051,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-math'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2448,7 +3098,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-utils'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -2456,6 +3106,10 @@ if not modules then modules = { } end modules ['l-utils'] = { -- hm, quite unreadable +local gsub = string.gsub +local concat = table.concat +local type, next = type, next + if not utils then utils = { } end if not utils.merger then utils.merger = { } end if not utils.lua then utils.lua = { } end @@ -2493,7 +3147,7 @@ function utils.merger._self_load_(name) end if data and utils.merger.strip_comment then -- saves some 20K - data = data:gsub("%-%-~[^\n\r]*[\r\n]", "") + data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "") end return data or "" end @@ -2511,7 +3165,7 @@ end function utils.merger._self_swap_(data,code) if data ~= "" then - return (data:gsub(utils.merger.pattern, function(s) + return (gsub(data,utils.merger.pattern, function(s) return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n" end, 1)) else @@ -2521,8 +3175,8 @@ end --~ stripper: --~ ---~ data = string.gsub(data,"%-%-~[^\n]*\n","") ---~ data = string.gsub(data,"\n\n+","\n") +--~ data = gsub(data,"%-%-~[^\n]*\n","") +--~ data = gsub(data,"\n\n+","\n") function utils.merger._self_libs_(libs,list) local result, f, frozen = { }, nil, false @@ -2530,9 +3184,10 @@ function utils.merger._self_libs_(libs,list) if type(libs) == 'string' then libs = { libs } end if type(list) == 'string' then list = { list } end local foundpath = nil - for _, lib in ipairs(libs) do - for _, pth in ipairs(list) do - pth = string.gsub(pth,"\\","/") -- file.clean_path + for i=1,#libs do + local lib = libs[i] + for j=1,#list do + local pth = gsub(list[j],"\\","/") -- file.clean_path utils.report("checking library path %s",pth) local name = pth .. "/" .. lib if lfs.isfile(name) then @@ -2544,7 +3199,8 @@ function utils.merger._self_libs_(libs,list) if foundpath then utils.report("using library path %s",foundpath) local right, wrong = { }, { } - for _, lib in ipairs(libs) do + for i=1,#libs do + local lib = libs[i] local fullname = foundpath .. "/" .. 
lib if lfs.isfile(fullname) then -- right[#right+1] = lib @@ -2558,15 +3214,15 @@ function utils.merger._self_libs_(libs,list) end end if #right > 0 then - utils.report("merged libraries: %s",table.concat(right," ")) + utils.report("merged libraries: %s",concat(right," ")) end if #wrong > 0 then - utils.report("skipped libraries: %s",table.concat(wrong," ")) + utils.report("skipped libraries: %s",concat(wrong," ")) end else utils.report("no valid library path found") end - return table.concat(result, "\n\n") + return concat(result, "\n\n") end function utils.merger.selfcreate(libs,list,target) @@ -2624,16 +3280,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['l-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +-- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. end + aux = aux or { } local concat, format, gmatch = table.concat, string.format, string.gmatch local tostring, type = tostring, type +local lpegmatch = lpeg.match + +local P, R, V = lpeg.P, lpeg.R, lpeg.V + +local escape, left, right = P("\\"), P('{'), P('}') + +lpeg.patterns.balanced = P { + [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0, + [2] = left * V(1) * right +} local space = lpeg.P(' ') local equal = lpeg.P("=") @@ -2641,7 +3309,7 @@ local comma = lpeg.P(",") local lbrace = lpeg.P("{") local rbrace = lpeg.P("}") local nobrace = 1 - (lbrace+rbrace) -local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace } +local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace } local spaces = space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) @@ -2679,13 +3347,13 @@ function aux.make_settings_to_hash_pattern(set,how) end end -function aux.settings_to_hash(str) +function aux.settings_to_hash(str,existing) if str and str ~= "" then - hash = { } + hash = existing or { } if moretolerant then - pattern_b_s:match(str) + lpegmatch(pattern_b_s,str) else - pattern_a_s:match(str) + lpegmatch(pattern_a_s,str) end return hash else @@ -2693,39 +3361,41 @@ function aux.settings_to_hash(str) end end -function aux.settings_to_hash_tolerant(str) +function aux.settings_to_hash_tolerant(str,existing) if str and str ~= "" then - hash = { } - pattern_b_s:match(str) + hash = existing or { } + lpegmatch(pattern_b_s,str) return hash else return { } end end -function aux.settings_to_hash_strict(str) +function aux.settings_to_hash_strict(str,existing) if str and str ~= "" then - hash = { } - pattern_c_s:match(str) + hash = existing or { } + lpegmatch(pattern_c_s,str) return next(hash) and hash else return nil end end -local seperator = comma * space^0 +local separator = comma * space^0 local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0) -local pattern = lpeg.Ct(value*(seperator*value)^0) +local pattern = lpeg.Ct(value*(separator*value)^0) -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored aux.settings_to_array_pattern = pattern +-- we could use a weak table as cache + function aux.settings_to_array(str) if not str or str == "" then return { } else - return pattern:match(str) + return lpegmatch(pattern,str) end end @@ -2734,10 +3404,10 @@ local function set(t,v) end local value = lpeg.P(lpeg.Carg(1)*value) / set -local 
pattern = value*(seperator*value)^0 * lpeg.Carg(1) +local pattern = value*(separator*value)^0 * lpeg.Carg(1) function aux.add_settings_to_array(t,str) - return pattern:match(str, nil, t) + return lpegmatch(pattern,str,nil,t) end function aux.hash_to_string(h,separator,yes,no,strict,omit) @@ -2785,6 +3455,13 @@ function aux.settings_to_set(str,t) return t end +local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace +local pattern = lpeg.Ct((space + value)^0) + +function aux.arguments_to_table(str) + return lpegmatch(pattern,str) +end + -- temporary here function aux.getparameters(self,class,parentclass,settings) @@ -2793,36 +3470,31 @@ function aux.getparameters(self,class,parentclass,settings) sc = table.clone(self[parent]) self[class] = sc end - aux.add_settings_to_array(sc, settings) + aux.settings_to_hash(settings,sc) end -- temporary here -local digit = lpeg.R("09") -local period = lpeg.P(".") -local zero = lpeg.P("0") - ---~ local finish = lpeg.P(-1) ---~ local nodigit = (1-digit) + finish ---~ local case_1 = (period * zero^1 * #nodigit)/"" -- .000 ---~ local case_2 = (period * (1-(zero^0/"") * #nodigit)^1 * (zero^0/"") * nodigit) -- .010 .10 .100100 - +local digit = lpeg.R("09") +local period = lpeg.P(".") +local zero = lpeg.P("0") local trailingzeros = zero^0 * -digit -- suggested by Roberto R -local case_1 = period * trailingzeros / "" -local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") - -local number = digit^1 * (case_1 + case_2) -local stripper = lpeg.Cs((number + 1)^0) +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = lpeg.Cs((number + 1)^0) --~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" --~ collectgarbage("collect") --~ str = string.rep(sample,10000) --~ local ts = os.clock() ---~ stripper:match(str) ---~ print(#str, os.clock()-ts, stripper:match(sample)) +--~ lpegmatch(stripper,str) +--~ print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +lpeg.patterns.strip_zeros = stripper function aux.strip_zeros(str) - return stripper:match(str) + return lpegmatch(stripper,str) end function aux.definetable(target) -- defines undefined tables @@ -2846,85 +3518,433 @@ function aux.accesstable(target) return t end +--~ function string.commaseparated(str) +--~ return gmatch(str,"([^,%s]+)") +--~ end + +-- as we use this a lot ... + +--~ function aux.cachefunction(action,weak) +--~ local cache = { } +--~ if weak then +--~ setmetatable(cache, { __mode = "kv" } ) +--~ end +--~ local function reminder(str) +--~ local found = cache[str] +--~ if not found then +--~ found = action(str) +--~ cache[str] = found +--~ end +--~ return found +--~ end +--~ return reminder, cache +--~ end + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['lxml-tab'] = { +if not modules then modules = { } end modules ['trac-tra'] = { version = 1.001, - comment = "this module is the basis for the lxml-* ones", + comment = "companion to trac-tra.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } ---[[ldx-- -

The parser used here is inspired by the variant discussed in the lua book, but -handles comments and processing instructions, has a different structure, and provides -parent access; a first version used different trickery but was less optimized, so we -went this route. First we had a find based parser, now we have an lpeg based one. -The find based parser can be found in l-xml-edu.lua along with other older code.

- -

Especially the lpath code is experimental; we will support some of xpath, but -only things that make sense for us; as compensation it is possible to hook in your -own functions. Apart from preprocessing content for ConTeXt we also need -this module for process management, like handling and -files.

- - -a/b/c /*/c -a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) -a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) - - -

Beware, the interface may change. For instance at, ns, tg, dt may get more -verbose names. Once the code is stable we will also remove some tracing and -optimize the code.

---ldx]]-- +-- the tag is kind of generic and used for functions that are not +-- bound to a variable, like node.new, node.copy etc (contrary to for instance +-- node.has_attribute which is bound to a has_attribute local variable in mkiv) -xml = xml or { } +local debug = require "debug" ---~ local xml = xml +local getinfo = debug.getinfo +local type, next = type, next +local concat = table.concat +local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub -local concat, remove, insert = table.concat, table.remove, table.insert -local type, next, setmetatable = type, next, setmetatable -local format, lower, find = string.format, string.lower, string.find +debugger = debugger or { } ---[[ldx-- -

This module can be used stand alone but also inside ConTeXt, in -which case it hooks into the tracker code. Therefore we provide a few -functions that set the tracers.

---ldx]]-- +local counters = { } +local names = { } -local trace_remap = false +-- one -if trackers then - trackers.register("xml.remap", function(v) trace_remap = v end) +local function hook() + local f = getinfo(2,"f").func + local n = getinfo(2,"Sn") +-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end + if f then + local cf = counters[f] + if cf == nil then + counters[f] = 1 + names[f] = n + else + counters[f] = cf + 1 + end + end end - -function xml.settrace(str,value) - if str == "remap" then - trace_remap = value or false +local function getname(func) + local n = names[func] + if n then + if n.what == "C" then + return n.name or '' + else + -- source short_src linedefined what name namewhat nups func + local name = n.name or n.namewhat or n.what + if not name or name == "" then name = "?" end + return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name) + end + else + return "unknown" + end +end +function debugger.showstats(printer,threshold) + printer = printer or texio.write or print + threshold = threshold or 0 + local total, grandtotal, functions = 0, 0, 0 + printer("\n") -- ugly but ok + -- table.sort(counters) + for func, count in next, counters do + if count > threshold then + local name = getname(func) + if not find(name,"for generator") then + printer(format("%8i %s", count, name)) + total = total + count + end + end + grandtotal = grandtotal + count + functions = functions + 1 end + printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) end ---[[ldx-- -

First a hack to enable namespace resolving. A namespace is characterized by -a URL. The following function associates a namespace prefix with a -pattern. We use lpeg, which in this case is more than twice as fast as a -find based solution where we loop over an array of patterns. Less code and -much cleaner.

---ldx]]-- +-- two -xml.xmlns = xml.xmlns or { } +--~ local function hook() +--~ local n = getinfo(2) +--~ if n.what=="C" and not n.name then +--~ local f = tostring(debug.traceback()) +--~ local cf = counters[f] +--~ if cf == nil then +--~ counters[f] = 1 +--~ names[f] = n +--~ else +--~ counters[f] = cf + 1 +--~ end +--~ end +--~ end +--~ function debugger.showstats(printer,threshold) +--~ printer = printer or texio.write or print +--~ threshold = threshold or 0 +--~ local total, grandtotal, functions = 0, 0, 0 +--~ printer("\n") -- ugly but ok +--~ -- table.sort(counters) +--~ for func, count in next, counters do +--~ if count > threshold then +--~ printer(format("%8i %s", count, func)) +--~ total = total + count +--~ end +--~ grandtotal = grandtotal + count +--~ functions = functions + 1 +--~ end +--~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) +--~ end -local check = lpeg.P(false) -local parse = check +-- rest ---[[ldx-- -

The next function associates a namespace prefix with a URL. This -normally happens independently of parsing.

+function debugger.savestats(filename,threshold) + local f = io.open(filename,'w') + if f then + debugger.showstats(function(str) f:write(str) end,threshold) + f:close() + end +end + +function debugger.enable() + debug.sethook(hook,"c") +end + +function debugger.disable() + debug.sethook() +--~ counters[debug.getinfo(2,"f").func] = nil +end + +function debugger.tracing() + local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0 + if n > 0 then + function debugger.tracing() return true end ; return true + else + function debugger.tracing() return false end ; return false + end +end + +--~ debugger.enable() + +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) +--~ print(math.sin(1*.5)) + +--~ debugger.disable() + +--~ print("") +--~ debugger.showstats() +--~ print("") +--~ debugger.showstats(print,3) + +setters = setters or { } +setters.data = setters.data or { } + +--~ local function set(t,what,value) +--~ local data, done = t.data, t.done +--~ if type(what) == "string" then +--~ what = aux.settings_to_array(what) -- inefficient but ok +--~ end +--~ for i=1,#what do +--~ local w = what[i] +--~ for d, f in next, data do +--~ if done[d] then +--~ -- prevent recursion due to wildcards +--~ elseif find(d,w) then +--~ done[d] = true +--~ for i=1,#f do +--~ f[i](value) +--~ end +--~ end +--~ end +--~ end +--~ end + +local function set(t,what,value) + local data, done = t.data, t.done + if type(what) == "string" then + what = aux.settings_to_hash(what) -- inefficient but ok + end + for w, v in next, what do + if v == "" then + v = value + else + v = toboolean(v) + end + for d, f in next, data do + if done[d] then + -- prevent recursion due to wildcards + elseif find(d,w) then + done[d] = true + for i=1,#f do + f[i](v) + end + end + end + end +end + +local function reset(t) + for d, f in next, t.data do + for i=1,#f do + f[i](false) + end + end +end + +local function enable(t,what) + set(t,what,true) +end + +local function disable(t,what) + local data = t.data + if not what or what == "" then + t.done = { } + reset(t) + else + set(t,what,false) + end +end + +function setters.register(t,what,...) + local data = t.data + what = lower(what) + local w = data[what] + if not w then + w = { } + data[what] = w + end + for _, fnc in next, { ... 
} do + local typ = type(fnc) + if typ == "function" then + w[#w+1] = fnc + elseif typ == "string" then + w[#w+1] = function(value) set(t,fnc,value,nesting) end + end + end +end + +function setters.enable(t,what) + local e = t.enable + t.enable, t.done = enable, { } + enable(t,string.simpleesc(tostring(what))) + t.enable, t.done = e, { } +end + +function setters.disable(t,what) + local e = t.disable + t.disable, t.done = disable, { } + disable(t,string.simpleesc(tostring(what))) + t.disable, t.done = e, { } +end + +function setters.reset(t) + t.done = { } + reset(t) +end + +function setters.list(t) -- pattern + local list = table.sortedkeys(t.data) + local user, system = { }, { } + for l=1,#list do + local what = list[l] + if find(what,"^%*") then + system[#system+1] = what + else + user[#user+1] = what + end + end + return user, system +end + +function setters.show(t) + commands.writestatus("","") + local list = setters.list(t) + for k=1,#list do + commands.writestatus(t.name,list[k]) + end + commands.writestatus("","") +end + +-- we could have used a bit of oo and the trackers:enable syntax but +-- there is already a lot of code around using the singular tracker + +-- we could make this into a module + +function setters.new(name) + local t + t = { + data = { }, + name = name, + enable = function(...) setters.enable (t,...) end, + disable = function(...) setters.disable (t,...) end, + register = function(...) setters.register(t,...) end, + list = function(...) setters.list (t,...) end, + show = function(...) setters.show (t,...) end, + } + setters.data[name] = t + return t +end + +trackers = setters.new("trackers") +directives = setters.new("directives") +experiments = setters.new("experiments") + +-- nice trick: we overload two of the directives related functions with variants that +-- do tracing (itself using a tracker) .. proof of concept + +local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end) +local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end) + +local e = directives.enable +local d = directives.disable + +function directives.enable(...) + commands.writestatus("directives","enabling: %s",concat({...}," ")) + e(...) +end + +function directives.disable(...) + commands.writestatus("directives","disabling: %s",concat({...}," ")) + d(...) +end + +local e = experiments.enable +local d = experiments.disable + +function experiments.enable(...) + commands.writestatus("experiments","enabling: %s",concat({...}," ")) + e(...) +end + +function experiments.disable(...) + commands.writestatus("experiments","disabling: %s",concat({...}," ")) + d(...) +end + +-- a useful example + +directives.register("system.nostatistics", function(v) + statistics.enable = not v +end) + + + +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['lxml-tab'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc +-- stripping spaces from e.g. 
cont-en.xml saves .2 sec runtime so it's not worth the +-- trouble + +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) + +--[[ldx-- +

The parser used here is inspired by the variant discussed in the lua book, but +handles comments and processing instructions, has a different structure, and provides +parent access; a first version used different trickery but was less optimized, so we +went this route. First we had a find based parser, now we have an lpeg based one. +The find based parser can be found in l-xml-edu.lua along with other older code.

+ +

Beware, the interface may change. For instance at, ns, tg, dt may get more +verbose names. Once the code is stable we will also remove some tracing and +optimize the code.
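For orientation, a rough sketch of what the parser below builds for an element like <x:a b="c">text</x:a>, going by the add_empty and add_begin constructors (the names on the right are illustrative, not part of the code):

local element = {
    ns    = "x",           -- namespace prefix as written in the source
    rn    = "mathml",      -- resolved (remapped) namespace, see nsremap
    tg    = "a",           -- tag name
    at    = { b = "c" },   -- attributes
    dt    = { "text" },    -- data: strings and child element tables
    __p__ = top,           -- the parent element (the stack top at parse time)
}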

+--ldx]]-- + +xml = xml or { } + +--~ local xml = xml + +local concat, remove, insert = table.concat, table.remove, table.insert +local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber +local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub +local utfchar = unicode.utf8.char +local lpegmatch = lpeg.match +local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs + +--[[ldx-- +

First a hack to enable namespace resolving. A namespace is characterized by +a URL. The following function associates a namespace prefix with a +pattern. We use lpeg, which in this case is more than twice as fast as a +find based solution where we loop over an array of patterns. Less code and +much cleaner.
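Put together, and reusing the examples from the blocks below, the net effect is roughly:

xml.registerns("mml","mathml")
xml.checkns("m","http://www.w3.org/mathml")
-- afterwards xml.xmlns.m is "mml"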

+--ldx]]-- + +xml.xmlns = xml.xmlns or { } + +local check = P(false) +local parse = check + +--[[ldx-- +

The next function associates a namespace prefix with a URL. This +normally happens independently of parsing.

xml.registerns("mml","mathml") @@ -2932,8 +3952,8 @@ xml.registerns("mml","mathml") --ldx]]-- function xml.registerns(namespace, pattern) -- pattern can be an lpeg - check = check + lpeg.C(lpeg.P(lower(pattern))) / namespace - parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) } + check = check + C(P(lower(pattern))) / namespace + parse = P { P(check) + 1 * V(1) } end --[[ldx-- @@ -2947,7 +3967,7 @@ xml.checkns("m","http://www.w3.org/mathml") --ldx]]-- function xml.checkns(namespace,url) - local ns = parse:match(lower(url)) + local ns = lpegmatch(parse,lower(url)) if ns and namespace ~= ns then xml.xmlns[namespace] = ns end @@ -2965,7 +3985,7 @@ This returns mml. --ldx]]-- function xml.resolvens(url) - return parse:match(lower(url)) or "" + return lpegmatch(parse,lower(url)) or "" end --[[ldx-- @@ -3004,27 +4024,36 @@ local x = xml.convert(somestring)

An optional second boolean argument tells this function not to create a root element.
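In the reworked xmlconvert below that flag has moved into an optional settings table; a minimal sketch, using the no_root flag that the rest of this file passes in:

local x = xml.convert(somestring, { no_root = true })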

---ldx]]-- -xml.strip_cm_and_dt = false -- an extra global flag, in case we have many includes +

Valid entities are:
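Going by the entity handling further down (predefined_simplified and the parsedentity pattern), these cover at least the predefined named entities and numeric character references, for instance:

&amp;   &quot;   &apos;   &lt;   &gt;   &#38;   &#x26;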

+ + + + + + +--ldx]]-- -- not just one big nested table capture (lpeg overflow) local nsremap, resolvens = xml.xmlns, xml.resolvens -local stack, top, dt, at, xmlns, errorstr, entities = {}, {}, {}, {}, {}, nil, {} +local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { } +local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false +local dcache, hcache, acache = { }, { }, { } -local mt = { __tostring = xml.text } +local mt = { } -function xml.check_error(top,toclose) - return "" +function initialize_mt(root) + mt = { __index = root } -- will be redefined later end -local strip = false -local cleanup = false +function xml.setproperty(root,k,v) + getmetatable(root).__index[k] = v +end -function xml.set_text_cleanup(fnc) - cleanup = fnc +function xml.check_error(top,toclose) + return "" end local function add_attribute(namespace,tag,value) @@ -3034,12 +4063,31 @@ local function add_attribute(namespace,tag,value) if tag == "xmlns" then xmlns[#xmlns+1] = resolvens(value) at[tag] = value + elseif namespace == "" then + at[tag] = value elseif namespace == "xmlns" then xml.checkns(tag,value) at["xmlns:" .. tag] = value else - at[tag] = value + -- for the moment this way: + at[namespace .. ":" .. tag] = value + end +end + +local function add_empty(spacing, namespace, tag) + if #spacing > 0 then + dt[#dt+1] = spacing + end + local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace + top = stack[#stack] + dt = top.dt + local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } + dt[#dt+1] = t + setmetatable(t, mt) + if at.xmlns then + remove(xmlns) end + at = { } end local function add_begin(spacing, namespace, tag) @@ -3067,28 +4115,12 @@ local function add_end(spacing, namespace, tag) end dt = top.dt dt[#dt+1] = toclose - dt[0] = top + -- dt[0] = top -- nasty circular reference when serializing table if toclose.at.xmlns then remove(xmlns) end end -local function add_empty(spacing, namespace, tag) - if #spacing > 0 then - dt[#dt+1] = spacing - end - local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace - top = stack[#stack] - dt = top.dt - local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top } - dt[#dt+1] = t - setmetatable(t, mt) - if at.xmlns then - remove(xmlns) - end - at = { } -end - local function add_text(text) if cleanup and #text > 0 then dt[#dt+1] = cleanup(text) @@ -3104,7 +4136,7 @@ local function add_special(what, spacing, text) if strip and (what == "@cm@" or what == "@dt@") then -- forget it else - dt[#dt+1] = { special=true, ns="", tg=what, dt={text} } + dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } } end end @@ -3112,42 +4144,260 @@ local function set_message(txt) errorstr = "garbage at the end of the file: " .. 
gsub(txt,"([ \n\r\t]*)","") end -local P, S, R, C, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V +local reported_attribute_errors = { } -local space = S(' \r\n\t') -local open = P('<') -local close = P('>') -local squote = S("'") -local dquote = S('"') -local equal = P('=') -local slash = P('/') -local colon = P(':') -local valid = R('az', 'AZ', '09') + S('_-.') -local name_yes = C(valid^1) * colon * C(valid^1) -local name_nop = C(P(true)) * C(valid^1) -local name = name_yes + name_nop +local function attribute_value_error(str) + if not reported_attribute_errors[str] then + logs.report("xml","invalid attribute value: %q",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end +local function attribute_specification_error(str) + if not reported_attribute_errors[str] then + logs.report("xml","invalid attribute specification: %q",str) + reported_attribute_errors[str] = true + at._error_ = str + end + return str +end -local utfbom = P('\000\000\254\255') + P('\255\254\000\000') + - P('\255\254') + P('\254\255') + P('\239\187\191') -- no capture +function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or format("&%s;",str) end +function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end +function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end -local spacing = C(space^0) -local justtext = C((1-open)^1) -local somespace = space^1 -local optionalspace = space^0 +local function fromhex(s) + local n = tonumber(s,16) + if n then + return utfchar(n) + else + return format("h:%s",s), true + end +end -local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -local attribute = (somespace * name * optionalspace * equal * optionalspace * value) / add_attribute -local attributes = attribute^0 +local function fromdec(s) + local n = tonumber(s) + if n then + return utfchar(n) + else + return format("d:%s",s), true + end +end -local text = justtext / add_text -local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example +-- one level expansion (simple case), no checking done -local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty -local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin -local endelement = (spacing * open * slash * name * optionalspace * close) / add_end +local rest = (1-P(";"))^0 +local many = P(1)^0 -local begincomment = open * P("!--") -local endcomment = P("--") * close +local parsedentity = + P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) + + (P("#x")*(many/fromhex) + P("#")*(many/fromdec)) + +-- parsing in the xml file + +local predefined_unified = { + [38] = "&", + [42] = """, + [47] = "'", + [74] = "<", + [76] = "&gr;", +} + +local predefined_simplified = { + [38] = "&", amp = "&", + [42] = '"', quot = '"', + [47] = "'", apos = "'", + [74] = "<", lt = "<", + [76] = ">", gt = ">", +} + +local function handle_hex_entity(str) + local h = hcache[str] + if not h then + local n = tonumber(str,16) + h = unify_predefined and predefined_unified[n] + if h then + if trace_entities then + logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h) + end + elseif utfize then + h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or "" + if not n then + logs.report("xml","utfize, ignoring hex entity &#x%s;",str) + elseif trace_entities then + logs.report("xml","utfize, converting hex entity &#x%s; into %s",str,h) + 
end + else + if trace_entities then + logs.report("xml","found entity &#x%s;",str) + end + h = "&#x" .. str .. ";" + end + hcache[str] = h + end + return h +end + +local function handle_dec_entity(str) + local d = dcache[str] + if not d then + local n = tonumber(str) + d = unify_predefined and predefined_unified[n] + if d then + if trace_entities then + logs.report("xml","utfize, converting dec entity &#%s; into %s",str,d) + end + elseif utfize then + d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or "" + if not n then + logs.report("xml","utfize, ignoring dec entity &#%s;",str) + elseif trace_entities then + logs.report("xml","utfize, converting dec entity &#%s; into %s",str,h) + end + else + if trace_entities then + logs.report("xml","found entity &#%s;",str) + end + d = "&#" .. str .. ";" + end + dcache[str] = d + end + return d +end + +xml.parsedentitylpeg = parsedentity + +local function handle_any_entity(str) + if resolve then + local a = acache[str] -- per instance ! todo + if not a then + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + elseif type(resolve) == "function" then + a = resolve(str) or entities[str] + else + a = entities[str] + end + if a then + if trace_entities then + logs.report("xml","resolved entity &%s; -> %s (internal)",str,a) + end + a = lpegmatch(parsedentity,a) or a + else + if xml.unknown_any_entity_format then + a = xml.unknown_any_entity_format(str) or "" + end + if a then + if trace_entities then + logs.report("xml","resolved entity &%s; -> %s (external)",str,a) + end + else + if trace_entities then + logs.report("xml","keeping entity &%s;",str) + end + if str == "" then + a = "&error;" + else + a = "&" .. str .. ";" + end + end + end + acache[str] = a + elseif trace_entities then + if not acache[str] then + logs.report("xml","converting entity &%s; into %s",str,a) + acache[str] = a + end + end + return a + else + local a = acache[str] + if not a then + if trace_entities then + logs.report("xml","found entity &%s;",str) + end + a = resolve_predefined and predefined_simplified[str] + if a then + -- one of the predefined + acache[str] = a + elseif str == "" then + a = "&error;" + acache[str] = a + else + a = "&" .. str .. 
";" + acache[str] = a + end + end + return a + end +end + +local function handle_end_entity(chr) + logs.report("xml","error in entity, %q found instead of ';'",chr) +end + +local space = S(' \r\n\t') +local open = P('<') +local close = P('>') +local squote = S("'") +local dquote = S('"') +local equal = P('=') +local slash = P('/') +local colon = P(':') +local semicolon = P(';') +local ampersand = P('&') +local valid = R('az', 'AZ', '09') + S('_-.') +local name_yes = C(valid^1) * colon * C(valid^1) +local name_nop = C(P(true)) * C(valid^1) +local name = name_yes + name_nop +local utfbom = lpeg.patterns.utfbom -- no capture +local spacing = C(space^0) + +----- entitycontent = (1-open-semicolon)^0 +local anyentitycontent = (1-open-semicolon-space-close)^0 +local hexentitycontent = R("AF","af","09")^0 +local decentitycontent = R("09")^0 +local parsedentity = P("#")/"" * ( + P("x")/"" * (hexentitycontent/handle_hex_entity) + + (decentitycontent/handle_dec_entity) + ) + (anyentitycontent/handle_any_entity) +local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity)) + +local text_unparsed = C((1-open)^1) +local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1) + +local somespace = space^1 +local optionalspace = space^0 + +----- value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value +local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value + +local endofattributes = slash * close + close -- recovery of flacky html +local whatever = space * name * optionalspace * equal +local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error +----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error +----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error +local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error + +local attributevalue = value + wrongvalue + +local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute +----- attributes = (attribute)^0 + +local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0 + +local parsedtext = text_parsed / add_text +local unparsedtext = text_unparsed / add_text +local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example + +local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty +local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin +local endelement = (spacing * open * slash * name * optionalspace * close) / add_end + +local begincomment = open * P("!--") +local endcomment = P("--") * close local begininstruction = open * P("?") local endinstruction = P("?") * close local begincdata = open * P("![CDATA[") @@ -3157,19 +4407,27 @@ local someinstruction = C((1 - endinstruction)^0) local somecomment = C((1 - endcomment )^0) local somecdata = C((1 - endcdata )^0) -local function entity(k,v) entities[k] = v end +local function normalentity(k,v ) entities[k] = v end +local function systementity(k,v,n) entities[k] = v end +local function publicentity(k,v,n) entities[k] = v end local begindoctype = open * P("!DOCTYPE") local enddoctype = close local beginset = P("[") local endset = P("]") -local doctypename = C((1-somespace)^0) +local doctypename 
= C((1-somespace-close)^0) local elementdoctype = optionalspace * P(" & + cleanup = settings.text_cleanup + stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { } + acache, hcache, dcache = { }, { }, { } -- not stored + reported_attribute_errors = { } + if settings.parent_root then + mt = getmetatable(settings.parent_root) + else + initialize_mt(top) + end stack[#stack+1] = top top.dt = { } dt = top.dt if not data or data == "" then errorstr = "empty xml file" - elseif not grammar:match(data) then - errorstr = "invalid xml file" + elseif utfize or resolve then + if lpegmatch(grammar_parsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - parsed text" + end + elseif type(data) == "string" then + if lpegmatch(grammar_unparsed_text,data) then + errorstr = "" + else + errorstr = "invalid xml file - unparsed text" + end else - errorstr = "" + errorstr = "invalid xml file - no text at all" end if errorstr and errorstr ~= "" then - result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true } + result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } } setmetatable(stack, mt) - if xml.error_handler then xml.error_handler("load",errorstr) end + local error_handler = settings.error_handler + if error_handler == false then + -- no error message + else + error_handler = error_handler or xml.error_handler + if error_handler then + xml.error_handler("load",errorstr) + end + end else result = stack[1] end - if not no_root then - result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={}, entities = entities } + if not settings.no_root then + result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings } setmetatable(result, mt) local rdt = result.dt for k=1,#rdt do local v = rdt[k] if type(v) == "table" and not v.special then -- always table -) result.ri = k -- rootindex +v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this ! break end end end + if errorstr and errorstr ~= "" then + result.error = true + end return result end +xml.convert = xmlconvert + +function xml.inheritedconvert(data,xmldata) + local settings = xmldata.settings + settings.parent_root = xmldata -- to be tested + -- settings.no_root = true + local xc = xmlconvert(data,settings) + -- xc.settings = nil + -- xc.entities = nil + -- xc.special = nil + -- xc.ri = nil + -- print(xc.tg) + return xc +end + --[[ldx--

Packaging data in an xml-like table is done with the following function. Maybe it will go away (when not used).
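A hedged usage sketch (tag, attributes and content are made up); a prefixed tag ends up split over the ns and tg fields:

local t = xml.package("mml:math", { display = "block" }, { "..." })
-- t.ns == "mml", t.tg == "math"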

@@ -3243,7 +4557,7 @@ function xml.is_valid(root) end function xml.package(tag,attributes,data) - local ns, tg = tag:match("^(.-):?([^:]+)$") + local ns, tg = match(tag,"^(.-):?([^:]+)$") local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} } setmetatable(t, mt) return t @@ -3261,21 +4575,19 @@ the whole file first. The function accepts a string representing a filename or a file handle.
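With the change below the optional second argument is the same settings table that xmlconvert takes; a minimal sketch (the filename is made up):

local root = xml.load("somefile.xml")
local flat = xml.load("somefile.xml", { no_root = true })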

--ldx]]-- -function xml.load(filename) +function xml.load(filename,settings) + local data = "" if type(filename) == "string" then + -- local data = io.loaddata(filename) - -todo: check type in io.loaddata local f = io.open(filename,'r') if f then - local root = xml.convert(f:read("*all")) + data = f:read("*all") f:close() - return root - else - return xml.convert("") end elseif filename then -- filehandle - return xml.convert(filename:read("*all")) - else - return xml.convert("") + data = filename:read("*all") end + return xmlconvert(data,settings) end --[[ldx-- @@ -3283,9 +4595,11 @@ end valid trees, which is what the next function does.
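A small illustration of the coercion done by xml.toxml below (the snippet is made up): strings get parsed, anything else is passed through untouched:

local t = xml.toxml("<a>text</a>")   -- a string: converted, without a root wrapper
local s = xml.toxml(t)               -- already a table: returned as is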

--ldx]]-- +local no_root = { no_root = true } + function xml.toxml(data) if type(data) == "string" then - local root = { xml.convert(data,true) } + local root = { xmlconvert(data,no_root) } return (#root > 1 and root) or root[1] else return data @@ -3305,7 +4619,7 @@ local function copy(old,tables) if not tables[old] then tables[old] = new end - for k,v in pairs(old) do + for k,v in next, old do new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v end local mt = getmetatable(old) @@ -3330,175 +4644,12 @@ alternative.

-- todo: add when not present -local fallbackhandle = (tex and tex.sprint) or io.write - -local function serialize(e, handle, textconverter, attributeconverter, specialconverter, nocommands) - if not e then - return - elseif not nocommands then - local ec = e.command - if ec ~= nil then -- we can have all kind of types - if e.special then - local etg, edt = e.tg, e.dt - local spc = specialconverter and specialconverter[etg] - if spc then - local result = spc(edt[1]) - if result then - handle(result) - return - else - -- no need to handle any further - end - end - end - local xc = xml.command - if xc then - xc(e,ec) - return - end - end - end - handle = handle or fallbackhandle - local etg = e.tg - if etg then - if e.special then - local edt = e.dt - local spc = specialconverter and specialconverter[etg] - if spc then - local result = spc(edt[1]) - if result then - handle(result) - else - -- no need to handle any further - end - elseif etg == "@pi@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@cm@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@cd@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@dt@" then - -- handle(format("",edt[1])) - handle("") - elseif etg == "@rt@" then - serialize(edt,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - else - local ens, eat, edt, ern = e.ns, e.at, e.dt, e.rn - local ats = eat and next(eat) and { } -- type test maybe faster - if ats then - if attributeconverter then - for k,v in next, eat do - ats[#ats+1] = format('%s=%q',k,attributeconverter(v)) - end - else - for k,v in next, eat do - ats[#ats+1] = format('%s=%q',k,v) - end - end - end - if ern and trace_remap and ern ~= ens then - ens = ern - end - if ens ~= "" then - if edt and #edt > 0 then - if ats then - -- handle(format("<%s:%s %s>",ens,etg,concat(ats," "))) - handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. ">") - else - -- handle(format("<%s:%s>",ens,etg)) - handle("<" .. ens .. ":" .. etg .. ">") - end - for i=1,#edt do - local e = edt[i] - if type(e) == "string" then - if textconverter then - handle(textconverter(e)) - else - handle(e) - end - else - serialize(e,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - -- handle(format("",ens,etg)) - handle("") - else - if ats then - -- handle(format("<%s:%s %s/>",ens,etg,concat(ats," "))) - handle("<" .. ens .. ":" .. etg .. " " .. concat(ats," ") .. "/>") - else - -- handle(format("<%s:%s/>",ens,etg)) - handle("<" .. ens .. ":" .. etg .. "/>") - end - end - else - if edt and #edt > 0 then - if ats then - -- handle(format("<%s %s>",etg,concat(ats," "))) - handle("<" .. etg .. " " .. concat(ats," ") .. ">") - else - -- handle(format("<%s>",etg)) - handle("<" .. etg .. ">") - end - for i=1,#edt do - local ei = edt[i] - if type(ei) == "string" then - if textconverter then - handle(textconverter(ei)) - else - handle(ei) - end - else - serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - -- handle(format("",etg)) - handle("") - else - if ats then - -- handle(format("<%s %s/>",etg,concat(ats," "))) - handle("<" .. etg .. " " .. concat(ats," ") .. "/>") - else - -- handle(format("<%s/>",etg)) - handle("<" .. etg .. 
"/>") - end - end - end - end - elseif type(e) == "string" then - if textconverter then - handle(textconverter(e)) - else - handle(e) - end - else - for i=1,#e do - local ei = e[i] - if type(ei) == "string" then - if textconverter then - handle(textconverter(ei)) - else - handle(ei) - end - else - serialize(ei,handle,textconverter,attributeconverter,specialconverter,nocommands) - end - end - end -end - -xml.serialize = serialize - function xml.checkbom(root) -- can be made faster if root.ri then local dt, found = root.dt, false for k=1,#dt do local v = dt[k] - if type(v) == "table" and v.special and v.tg == "@pi" and find(v.dt,"xml.*version=") then + if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then found = true break end @@ -3515,111 +4666,356 @@ end and then handle the lot.

--ldx]]-- -function xml.tostring(root) -- 25% overhead due to collecting - if root then - if type(root) == 'string' then - return root - elseif next(root) then -- next is faster than type (and >0 test) - local result = { } - serialize(root,function(s) result[#result+1] = s end) -- brrr, slow (direct printing is faster) - return concat(result,"") +-- new experimental reorganized serialize + +local function verbose_element(e,handlers) + local handle = handlers.handle + local serialize = handlers.serialize + local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn + local ats = eat and next(eat) and { } + if ats then + for k,v in next, eat do + ats[#ats+1] = format('%s=%q',k,v) end end - return "" -end - ---[[ldx-- -

The next function operates on the content only and needs a handle function -that accepts a string.

---ldx]]-- - -function xml.string(e,handle) - if not handle or (e.special and e.tg ~= "@rt@") then - -- nothing - elseif e.tg then - local edt = e.dt - if edt then + if ern and trace_remap and ern ~= ens then + ens = ern + end + if ens ~= "" then + if edt and #edt > 0 then + if ats then + handle("<",ens,":",etg," ",concat(ats," "),">") + else + handle("<",ens,":",etg,">") + end for i=1,#edt do - xml.string(edt[i],handle) + local e = edt[i] + if type(e) == "string" then + handle(e) + else + serialize(e,handlers) + end + end + handle("") + else + if ats then + handle("<",ens,":",etg," ",concat(ats," "),"/>") + else + handle("<",ens,":",etg,"/>") end end else - handle(e) + if edt and #edt > 0 then + if ats then + handle("<",etg," ",concat(ats," "),">") + else + handle("<",etg,">") + end + for i=1,#edt do + local ei = edt[i] + if type(ei) == "string" then + handle(ei) + else + serialize(ei,handlers) + end + end + handle("") + else + if ats then + handle("<",etg," ",concat(ats," "),"/>") + else + handle("<",etg,"/>") + end + end end end ---[[ldx-- -

How you deal with saving data depends on your preferences. For a 40 MB database -file the timing on a 2.3 Core Duo are as follows (time in seconds):

- - -1.3 : load data from file to string -6.1 : convert string into tree -5.3 : saving in file using xmlsave -6.8 : converting to string using xml.tostring -3.6 : saving converted string in file - +local function verbose_pi(e,handlers) + handlers.handle("") +end -

The save function is given below.

---ldx]]-- +local function verbose_comment(e,handlers) + handlers.handle("") +end -function xml.save(root,name) - local f = io.open(name,"w") - if f then - xml.serialize(root,function(s) f:write(s) end) - f:close() - end +local function verbose_cdata(e,handlers) + handlers.handle("") end ---[[ldx-- -

A few helpers:

---ldx]]-- +local function verbose_doctype(e,handlers) + handlers.handle("") +end -function xml.body(root) - return (root.ri and root.dt[root.ri]) or root +local function verbose_root(e,handlers) + handlers.serialize(e.dt,handlers) end -function xml.text(root) - return (root and xml.tostring(root)) or "" +local function verbose_text(e,handlers) + handlers.handle(e) end -function xml.content(root) -- bugged - return (root and root.dt and xml.tostring(root.dt)) or "" +local function verbose_document(e,handlers) + local serialize = handlers.serialize + local functions = handlers.functions + for i=1,#e do + local ei = e[i] + if type(ei) == "string" then + functions["@tx@"](ei,handlers) + else + serialize(ei,handlers) + end + end end -function xml.isempty(root, pattern) - if pattern == "" or pattern == "*" then - pattern = nil +local function serialize(e,handlers,...) + local initialize = handlers.initialize + local finalize = handlers.finalize + local functions = handlers.functions + if initialize then + local state = initialize(...) + if not state == true then + return state + end end - if pattern then - -- todo - return false + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) else - return not root or not root.dt or #root.dt == 0 or root.dt == "" + functions["@dc@"](e,handlers) + end + if finalize then + return finalize() end end ---[[ldx-- -

The next helper erases an element but keeps the table as it is, -and since empty strings are not serialized (effectively) it does -not harm. Copying the table would take more time. Usage:

- - -dt[k] = xml.empty() or xml.empty(dt,k) - ---ldx]]-- - -function xml.empty(dt,k) - if dt and k then - dt[k] = "" - return dt[k] +local function xserialize(e,handlers) + local functions = handlers.functions + local etg = e.tg + if etg then + (functions[etg] or functions["@el@"])(e,handlers) + -- elseif type(e) == "string" then + -- functions["@tx@"](e,handlers) else - return "" + functions["@dc@"](e,handlers) end end ---[[ldx-- -

The next helper assigns a tree (or string). Usage:

+local handlers = { } + +local function newhandlers(settings) + local t = table.copy(handlers.verbose or { }) -- merge + if settings then + for k,v in next, settings do + if type(v) == "table" then + tk = t[k] if not tk then tk = { } t[k] = tk end + for kk,vv in next, v do + tk[kk] = vv + end + else + t[k] = v + end + end + if settings.name then + handlers[settings.name] = t + end + end + return t +end + +local nofunction = function() end + +function xml.sethandlersfunction(handler,name,fnc) + handler.functions[name] = fnc or nofunction +end + +function xml.gethandlersfunction(handler,name) + return handler.functions[name] +end + +function xml.gethandlers(name) + return handlers[name] +end + +newhandlers { + name = "verbose", + initialize = false, -- faster than nil and mt lookup + finalize = false, -- faster than nil and mt lookup + serialize = xserialize, + handle = print, + functions = { + ["@dc@"] = verbose_document, + ["@dt@"] = verbose_doctype, + ["@rt@"] = verbose_root, + ["@el@"] = verbose_element, + ["@pi@"] = verbose_pi, + ["@cm@"] = verbose_comment, + ["@cd@"] = verbose_cdata, + ["@tx@"] = verbose_text, + } +} + +--[[ldx-- +

How you deal with saving data depends on your preferences. For a 40 MB database +file the timings on a 2.3 Core Duo are as follows (time in seconds):

+ + +1.3 : load data from file to string +6.1 : convert string into tree +5.3 : saving in file using xmlsave +6.8 : converting to string using xml.tostring +3.6 : saving converted string in file + + +

Beware, these were timings with the old routine but measurements will not be that +much different I guess.
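For instance, with the handlers set up below one can write (a sketch; xml.convert is assumed from the parser part of this module):

local root = xml.convert("<a><b>alpha</b><b>beta</b></a>") -- xml.convert is assumed from earlier in this file
xml.save(root,"test.xml")    -- serializes straight into a file via the file handler below
local s = xml.tostring(root) -- serializes into a string via the string handler below

A custom handler can be derived from the verbose one in the same way; the name "comma" and the collector are made up for this illustration:

local collected
local commahandler = xml.newhandlers {
    name       = "comma", -- hypothetical handler name
    initialize = function() collected = { } return collected end,
    finalize   = function() return table.concat(collected,",") end,
    handle     = function(...) collected[#collected+1] = table.concat { ... } end,
}
-- xml.serialize(root,commahandler) then returns the serialized fragments joined
-- by commas; fields that are not overridden fall back to the verbose handler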

+--ldx]]-- + +-- maybe this will move to lxml-xml + +local result + +local xmlfilehandler = newhandlers { + name = "file", + initialize = function(name) result = io.open(name,"wb") return result end, + finalize = function() result:close() return true end, + handle = function(...) result:write(...) end, +} + +-- no checking on writeability here but not faster either +-- +-- local xmlfilehandler = newhandlers { +-- initialize = function(name) io.output(name,"wb") return true end, +-- finalize = function() io.close() return true end, +-- handle = io.write, +-- } + + +function xml.save(root,name) + serialize(root,xmlfilehandler,name) +end + +local result + +local xmlstringhandler = newhandlers { + name = "string", + initialize = function() result = { } return result end, + finalize = function() return concat(result) end, + handle = function(...) result[#result+1] = concat { ... } end +} + +local function xmltostring(root) -- 25% overhead due to collecting + if root then + if type(root) == 'string' then + return root + else -- if next(root) then -- next is faster than type (and >0 test) + return serialize(root,xmlstringhandler) or "" + end + end + return "" +end + +local function xmltext(root) -- inline + return (root and xmltostring(root)) or "" +end + +function initialize_mt(root) + mt = { __tostring = xmltext, __index = root } +end + +xml.defaulthandlers = handlers +xml.newhandlers = newhandlers +xml.serialize = serialize +xml.tostring = xmltostring + +--[[ldx-- +

The next function operates on the content only and needs a handle function +that accepts a string.
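For instance (a sketch; the tree is assumed to come from xml.convert defined earlier in this file):

local root = xml.convert("<a><b>alpha</b><b>beta</b></a>")
local snippets = { }
xml.string(root,function(s) snippets[#snippets+1] = s end)
print(table.concat(snippets)) -- all text fragments of the tree, in document order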

+--ldx]]-- + +local function xmlstring(e,handle) + if not handle or (e.special and e.tg ~= "@rt@") then + -- nothing + elseif e.tg then + local edt = e.dt + if edt then + for i=1,#edt do + xmlstring(edt[i],handle) + end + end + else + handle(e) + end +end + +xml.string = xmlstring + +--[[ldx-- +

A few helpers:
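For instance (a sketch using the helpers defined below; xml.convert is assumed from the parser part of this file):

local root = xml.convert("<a><b>alpha</b></a>")
local body = xml.body(root)   -- the root element proper (skips the document wrapper)
print(xml.name(body))         -- here "a"; a namespaced element shows up as "ns:tag"
local top  = xml.root(body)   -- follows the __p__ links back to the top of the tree
local up   = xml.parent(body) -- the direct parent (one __p__ step)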

+--ldx]]-- + +--~ xmlsetproperty(root,"settings",settings) + +function xml.settings(e) + while e do + local s = e.settings + if s then + return s + else + e = e.__p__ + end + end + return nil +end + +function xml.root(e) + local r = e + while e do + e = e.__p__ + if e then + r = e + end + end + return r +end + +function xml.parent(root) + return root.__p__ +end + +function xml.body(root) + return (root.ri and root.dt[root.ri]) or root -- not ok yet +end + +function xml.name(root) + if not root then + return "" + elseif root.ns == "" then + return root.tg + else + return root.ns .. ":" .. root.tg + end +end + +--[[ldx-- +

The next helper erases an element but keeps the table as it is, +and since empty strings are (effectively) not serialized this does +no harm. Copying the table would take more time. Usage:
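(a hypothetical sketch; xml.convert and xml.body are assumed from elsewhere in this file)

local root = xml.convert("<a><b>alpha</b><b>beta</b></a>")
local a = xml.body(root)
xml.erase(a.dt,1) -- the first <b> becomes an empty string and effectively disappears when serializing
-- xml.erase(a.dt) is meant to wipe the whole content in one go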

+--ldx]]-- + +function xml.erase(dt,k) + if dt then + if k then + dt[k] = "" + else for k=1,#dt do + dt[1] = { "" } + end end + end +end + +--[[ldx-- +

The next helper assigns a tree (or string). Usage:

dt[k] = xml.assign(root) or xml.assign(dt,k,root) @@ -3635,6 +5031,42 @@ function xml.assign(dt,k,root) end end +-- the following helpers may move + +--[[ldx-- +

The next helper converts the content of an element into a cdata section, optionally wrapped in a given element. Usage:

+ +xml.tocdata(e) +xml.tocdata(e,"error") + +--ldx]]-- + +function xml.tocdata(e,wrapper) + local whatever = xmltostring(e.dt) + if wrapper then + whatever = format("<%s>%s",wrapper,whatever,wrapper) + end + local t = { special = true, ns = "", tg = "@cd@", at = {}, rn = "", dt = { whatever }, __p__ = e } + setmetatable(t,getmetatable(e)) + e.dt = { t } +end + +function xml.makestandalone(root) + if root.ri then + local dt = root.dt + for k=1,#dt do + local v = dt[k] + if type(v) == "table" and v.special and v.tg == "@pi@" then + local txt = v.dt[1] + if find(txt,"xml.*version=") then + v.dt[1] = txt .. " standalone='yes'" + break + end + end + end + end +end + end -- of closure @@ -3648,1420 +5080,1285 @@ if not modules then modules = { } end modules ['lxml-pth'] = { license = "see context related readme files" } +-- e.ni is only valid after a filter run + local concat, remove, insert = table.concat, table.remove, table.insert local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, lower, gmatch, gsub, find = string.format, string.lower, string.gmatch, string.gsub, string.find +local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep +local lpegmatch = lpeg.match + +-- beware, this is not xpath ... e.g. position is different (currently) and +-- we have reverse-sibling as reversed preceding sibling --[[ldx--

This module can be used stand-alone but also inside ConTeXt MkIV, in which case it hooks into the tracker code. Therefore we provide a few functions that set the tracers. Here we overload a previously defined function.
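For instance, tracing can be switched on from the outside; a small sketch, assuming the usual tracker interface (the same one tested for below):

if trackers then
    trackers.enable("xml.path")    -- show how lpath expressions are resolved
    trackers.enable("xml.profile") -- also sets the parse and path tracers (see the registrations below)
end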

+

If I can get in the mood I will make a variant that is XSLT compliant +but I wonder if it makes sense.

--ldx]]-- -local trace_lpath = false - -if trackers then - trackers.register("xml.lpath", function(v) trace_lpath = v end) -end +--[[ldx-- +

Especially the lpath code is experimental; we will support some of xpath, but +only things that make sense for us. As compensation it is possible to hook in your +own functions. Apart from preprocessing content for ConTeXt we also need +this module for process management, like handling ctx and rlx +files.

-local settrace = xml.settrace -- lxml-tab + +a/b/c /*/c +a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n) +a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n) + +--ldx]]-- -function xml.settrace(str,value) - if str == "lpath" then - trace_lpath = value or false - else - settrace(str,value) -- lxml-tab - end -end +local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end +local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end +local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end --[[ldx-- -

We've now arrived at an intersting part: accessing the tree using a subset +

We've now arrived at an interesting part: accessing the tree using a subset of xpath and since we're not compatible we call it lpath. We will explain more about its usage in other documents.
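A small sketch of how such expressions are used (xml.filter is the front end assumed to be defined further on, on top of the machinery below; xml.convert comes from the parser part of this file):

local root  = xml.convert("<a><b><c>one</c><c>two</c></b></a>")
local texts = xml.filter(root,"a/b/c/text()")  -- assumed front end over parse_pattern and the apply_* functions
local first = xml.filter(root,"a/b/c/first()") -- finalizers like text(), first() and count() hook into xml.finalizers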

--ldx]]-- -local lpathcalls = 0 -- statistics -local lpathcached = 0 -- statistics +local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end +local lpathcached = 0 function xml.lpathcached() return lpathcached end -xml.functions = xml.functions or { } -xml.expressions = xml.expressions or { } +xml.functions = xml.functions or { } -- internal +xml.expressions = xml.expressions or { } -- in expressions +xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection) +xml.specialhandler = xml.specialhandler or { } local functions = xml.functions local expressions = xml.expressions +local finalizers = xml.finalizers -local actions = { - [10] = "stay", - [11] = "parent", - [12] = "subtree root", - [13] = "document root", - [14] = "any", - [15] = "many", - [16] = "initial", - [20] = "match", - [21] = "match one of", - [22] = "match and attribute eq", - [23] = "match and attribute ne", - [24] = "match one of and attribute eq", - [25] = "match one of and attribute ne", - [27] = "has attribute", - [28] = "has value", - [29] = "fast match", - [30] = "select", - [31] = "expression", - [40] = "processing instruction", -} - --- a rather dumb lpeg +finalizers.xml = finalizers.xml or { } +finalizers.tex = finalizers.tex or { } -local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc +local function fallback (t, name) + local fn = finalizers[name] + if fn then + t[name] = fn + else + logs.report("xml","unknown sub finalizer '%s'",tostring(name)) + fn = function() end + end + return fn +end --- instead of using functions we just parse a few names which saves a call --- later on +setmetatable(finalizers.xml, { __index = fallback }) +setmetatable(finalizers.tex, { __index = fallback }) -local lp_position = P("position()") / "ps" -local lp_index = P("index()") / "id" -local lp_text = P("text()") / "tx" -local lp_name = P("name()") / "(ns~='' and ns..':'..tg)" -- "((rt.ns~='' and rt.ns..':'..rt.tg) or '')" -local lp_tag = P("tag()") / "tg" -- (rt.tg or '') -local lp_ns = P("ns()") / "ns" -- (rt.ns or '') -local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") -local lp_doequal = P("=") / "==" -local lp_attribute = P("@") / "" * Cc("(at['") * R("az","AZ","--","__")^1 * Cc("'] or '')") +xml.defaultprotocol = "xml" -local lp_lua_function = C(R("az","AZ","--","__")^1 * (P(".") * R("az","AZ","--","__")^1)^1) * P("(") / function(t) -- todo: better . handling - return t .. "(" -end +-- as xsl does not follow xpath completely here we will also +-- be more liberal especially with regards to the use of | and +-- the rootpath: +-- +-- test : all 'test' under current +-- /test : 'test' relative to current +-- a|b|c : set of names +-- (a|b|c) : idem +-- ! : not +-- +-- after all, we're not doing transformations but filtering. in +-- addition we provide filter functions (last bit) +-- +-- todo: optimizer +-- +-- .. : parent +-- * : all kids +-- / : anchor here +-- // : /**/ +-- ** : all in between +-- +-- so far we had (more practical as we don't transform) +-- +-- {/test} : kids 'test' under current node +-- {test} : any kid with tag 'test' +-- {//test} : same as above -local lp_function = C(R("az","AZ","--","__")^1) * P("(") / function(t) -- todo: better . handling - if expressions[t] then - return "expressions." .. t .. 
"(" - else - return "expressions.error(" - end -end +-- evaluator (needs to be redone, for the moment copied) -local lparent = lpeg.P("(") -local rparent = lpeg.P(")") -local noparent = 1 - (lparent+rparent) -local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent} -local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"} +-- todo: apply_axis(list,notable) and collection vs single --- if we use a dedicated namespace then we don't need to pass rt and k +local apply_axis = { } -local lp_special = (C(P("name")+P("text")+P("tag"))) * value / function(t,s) - if expressions[t] then - if s then - return "expressions." .. t .. "(r,k," .. s ..")" - else - return "expressions." .. t .. "(r,k)" +apply_axis['root'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local rt = ll + while ll do + ll = ll.__p__ + if ll then + rt = ll + end end - else - return "expressions.error(" .. t .. ")" + collected[#collected+1] = rt end + return collected end -local converter = lpeg.Cs ( ( - lp_position + - lp_index + - lp_text + lp_name + -- fast one - lp_special + - lp_noequal + lp_doequal + - lp_attribute + - lp_lua_function + - lp_function + -1 )^1 ) - --- expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1 +apply_axis['self'] = function(list) +--~ local collected = { } +--~ for l=1,#list do +--~ collected[#collected+1] = list[l] +--~ end +--~ return collected + return list +end -local template = [[ - return function(expressions,r,d,k,e,dt,ns,tg,id,ps) - local at, tx = e.at or { }, dt[1] or "" - return %s +apply_axis['child'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local dt = ll.dt + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + end + end + ll.en = en end -]] - -local function make_expression(str) - str = converter:match(str) - return str, loadstring(format(template,str))() -end - -local map = { } - -local space = S(' \r\n\t') -local squote = S("'") -local dquote = S('"') -local lparent = P('(') -local rparent = P(')') -local atsign = P('@') -local lbracket = P('[') -local rbracket = P(']') -local exclam = P('!') -local period = P('.') -local eq = P('==') + P('=') -local ne = P('<>') + P('!=') -local star = P('*') -local slash = P('/') -local colon = P(':') -local bar = P('|') -local hat = P('^') -local valid = R('az', 'AZ', '09') + S('_-') -local name_yes = C(valid^1 + star) * colon * C(valid^1 + star) -- permits ns:* *:tg *:* -local name_nop = Cc("*") * C(valid^1) -local name = name_yes + name_nop -local number = C((S('+-')^0 * R('09')^1)) / tonumber -local names = (bar^0 * name)^1 -local morenames = name * (bar^0 * name)^1 -local instructiontag = P('pi::') -local spacing = C(space^0) -local somespace = space^1 -local optionalspace = space^0 -local text = C(valid^0) -local value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -local empty = 1-slash - -local is_eq = lbracket * atsign * name * eq * value * rbracket -local is_ne = lbracket * atsign * name * ne * value * rbracket -local is_attribute = lbracket * atsign * name * rbracket -local is_value = lbracket * value * rbracket -local is_number = lbracket * number * rbracket - -local nobracket = 1-(lbracket+rbracket) -- must be improved -local is_expression = lbracket * C(((C(nobracket^1))/make_expression)) * rbracket - -local is_expression = lbracket * 
(C(nobracket^1))/make_expression * rbracket - -local is_one = name -local is_none = exclam * name -local is_one_of = ((lparent * names * rparent) + morenames) -local is_none_of = exclam * ((lparent * names * rparent) + morenames) - -local stay = (period ) -local parent = (period * period ) / function( ) map[#map+1] = { 11 } end -local subtreeroot = (slash + hat ) / function( ) map[#map+1] = { 12 } end -local documentroot = (hat * hat ) / function( ) map[#map+1] = { 13 } end -local any = (star ) / function( ) map[#map+1] = { 14 } end -local many = (star * star ) / function( ) map[#map+1] = { 15 } end -local initial = (hat * hat * hat ) / function( ) map[#map+1] = { 16 } end - -local match = (is_one ) / function(...) map[#map+1] = { 20, true , ... } end -local match_one_of = (is_one_of ) / function(...) map[#map+1] = { 21, true , ... } end -local dont_match = (is_none ) / function(...) map[#map+1] = { 20, false, ... } end -local dont_match_one_of = (is_none_of ) / function(...) map[#map+1] = { 21, false, ... } end - -local match_and_eq = (is_one * is_eq ) / function(...) map[#map+1] = { 22, true , ... } end -local match_and_ne = (is_one * is_ne ) / function(...) map[#map+1] = { 23, true , ... } end -local dont_match_and_eq = (is_none * is_eq ) / function(...) map[#map+1] = { 22, false, ... } end -local dont_match_and_ne = (is_none * is_ne ) / function(...) map[#map+1] = { 23, false, ... } end - -local match_one_of_and_eq = (is_one_of * is_eq ) / function(...) map[#map+1] = { 24, true , ... } end -local match_one_of_and_ne = (is_one_of * is_ne ) / function(...) map[#map+1] = { 25, true , ... } end -local dont_match_one_of_and_eq = (is_none_of * is_eq ) / function(...) map[#map+1] = { 24, false, ... } end -local dont_match_one_of_and_ne = (is_none_of * is_ne ) / function(...) map[#map+1] = { 25, false, ... } end - -local has_attribute = (is_one * is_attribute) / function(...) map[#map+1] = { 27, true , ... } end -local has_value = (is_one * is_value ) / function(...) map[#map+1] = { 28, true , ... } end -local dont_has_attribute = (is_none * is_attribute) / function(...) map[#map+1] = { 27, false, ... } end -local dont_has_value = (is_none * is_value ) / function(...) map[#map+1] = { 28, false, ... } end -local position = (is_one * is_number ) / function(...) map[#map+1] = { 30, true, ... } end -local dont_position = (is_none * is_number ) / function(...) map[#map+1] = { 30, false, ... } end - -local expression = (is_one * is_expression)/ function(...) map[#map+1] = { 31, true, ... } end -local dont_expression = (is_none * is_expression)/ function(...) map[#map+1] = { 31, false, ... } end - -local self_expression = ( is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end - map[#map+1] = { 31, true, "*", "*", ... } end -local dont_self_expression = (exclam * is_expression) / function(...) if #map == 0 then map[#map+1] = { 11 } end - map[#map+1] = { 31, false, "*", "*", ... } end - -local instruction = (instructiontag * text ) / function(...) map[#map+1] = { 40, ... } end -local nothing = (empty ) / function( ) map[#map+1] = { 15 } end -- 15 ? 
-local crap = (1-slash)^1 - --- a few ugly goodies: - -local docroottag = P('^^') / function( ) map[#map+1] = { 12 } end -local subroottag = P('^') / function( ) map[#map+1] = { 13 } end -local roottag = P('root::') / function( ) map[#map+1] = { 12 } end -local parenttag = P('parent::') / function( ) map[#map+1] = { 11 } end -local childtag = P('child::') -local selftag = P('self::') - --- there will be more and order will be optimized - -local selector = ( - instruction + --- many + any + -- brrr, not here ! - parent + stay + - dont_position + position + - dont_match_one_of_and_eq + dont_match_one_of_and_ne + - match_one_of_and_eq + match_one_of_and_ne + - dont_match_and_eq + dont_match_and_ne + - match_and_eq + match_and_ne + - dont_expression + expression + - dont_self_expression + self_expression + - has_attribute + has_value + - dont_match_one_of + match_one_of + - dont_match + match + - many + any + - crap + empty -) - -local grammar = P { "startup", - startup = (initial + documentroot + subtreeroot + roottag + docroottag + subroottag)^0 * V("followup"), - followup = ((slash + parenttag + childtag + selftag)^0 * selector)^1, -} + return collected +end -local function compose(str) - if not str or str == "" then - -- wildcard - return true - elseif str == '/' then - -- root - return false - else - map = { } - grammar:match(str) - if #map == 0 then - return true - else - local m = map[1][1] - if #map == 1 then - if m == 14 or m == 15 then - -- wildcard - return true - elseif m == 12 then - -- root - return false - end - elseif #map == 2 and m == 12 and map[2][1] == 20 then - -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } } - map[2][1] = 29 - return { map[2] } - end - if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then - insert(map, 1, { 16 }) +local function collect(list,collected) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + collect(dk,collected) end - -- print(gsub(table.serialize(map),"[ \n]+"," ")) - return map end + list.en = en end end +apply_axis['descendant'] = function(list) + local collected = { } + for l=1,#list do + collect(list[l],collected) + end + return collected +end -local cache = { } - -function xml.lpath(pattern,trace) - lpathcalls = lpathcalls + 1 - if type(pattern) == "string" then - local result = cache[pattern] - if result == nil then -- can be false which is valid -) - result = compose(pattern) - cache[pattern] = result - lpathcached = lpathcached + 1 +local function collect(list,collected) + local dt = list.dt + if dt then + local en = 0 + for k=1,#dt do + local dk = dt[k] + if dk.tg then + collected[#collected+1] = dk + dk.ni = k -- refresh + en = en + 1 + dk.ei = en + collect(dk,collected) + end end - if trace or trace_lpath then - xml.lshow(result) + list.en = en + end +end +apply_axis['descendant-or-self'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + if ll.special ~= true then -- catch double root + collected[#collected+1] = ll end - return result - else - return pattern + collect(ll,collected) end + return collected end -function xml.cached_patterns() - return cache +apply_axis['ancestor'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + while ll do + ll = ll.__p__ + if ll then + collected[#collected+1] = ll + end + end + end + return collected end --- we run out of locals (limited to 200) --- --- local 
fallbackreport = (texio and texio.write) or io.write - -function xml.lshow(pattern,report) --- report = report or fallbackreport - report = report or (texio and texio.write) or io.write - local lp = xml.lpath(pattern) - if lp == false then - report(" -: root\n") - elseif lp == true then - report(" -: wildcard\n") - else - if type(pattern) == "string" then - report(format("pattern: %s\n",pattern)) - end - for k=1,#lp do - local v = lp[k] - if #v > 1 then - local t = { } - for i=2,#v do - local vv = v[i] - if type(vv) == "string" then - t[#t+1] = (vv ~= "" and vv) or "#" - elseif type(vv) == "boolean" then - t[#t+1] = (vv and "==") or "<>" - end - end - report(format("%2i: %s %s -> %s\n", k,v[1],actions[v[1]],concat(t," "))) - else - report(format("%2i: %s %s\n", k,v[1],actions[v[1]])) +apply_axis['ancestor-or-self'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + collected[#collected+1] = ll + while ll do + ll = ll.__p__ + if ll then + collected[#collected+1] = ll end end end + return collected end -function xml.xshow(e,...) -- also handy when report is given, use () to isolate first e - local t = { ... } --- local report = (type(t[#t]) == "function" and t[#t]) or fallbackreport - local report = (type(t[#t]) == "function" and t[#t]) or (texio and texio.write) or io.write - if e == nil then - report("\n") - elseif type(e) ~= "table" then - report(tostring(e)) - elseif e.tg then - report(tostring(e) .. "\n") - else - for i=1,#e do - report(tostring(e[i]) .. "\n") +apply_axis['parent'] = function(list) + local collected = { } + for l=1,#list do + local pl = list[l].__p__ + if pl then + collected[#collected+1] = pl end end + return collected end ---[[ldx-- -

An is converted to a table with instructions for traversing the -tree. Hoever, simple cases are signaled by booleans. Because we don't know in -advance what we want to do with the found element the handle gets three arguments:

+apply_axis['attribute'] = function(list) + return { } +end - -r : the root element of the data table -d : the data table of the result -t : the index in the data table of the result - +apply_axis['namespace'] = function(list) + return { } +end -

Access to the root and data table makes it possible to construct insert and delete -functions.

---ldx]]-- - -local functions = xml.functions -local expressions = xml.expressions - -expressions.contains = string.find -expressions.find = string.find -expressions.upper = string.upper -expressions.lower = string.lower -expressions.number = tonumber -expressions.boolean = toboolean - -expressions.oneof = function(s,...) -- slow - local t = {...} for i=1,#t do if s == t[i] then return true end end return false -end - -expressions.error = function(str) - xml.error_handler("unknown function in lpath expression",str or "?") - return false -end - -functions.text = function(root,k,n) -- unchecked, maybe one deeper - local t = type(t) - if t == "string" then - return t - else -- todo n - local rdt = root.dt - return (rdt and rdt[k]) or root[k] or "" - end +apply_axis['following'] = function(list) -- incomplete +--~ local collected = { } +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni+1,#d do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ collected[#collected+1] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } +end + +apply_axis['preceding'] = function(list) -- incomplete +--~ local collected = { } +--~ for l=1,#list do +--~ local ll = list[l] +--~ local p = ll.__p__ +--~ local d = p.dt +--~ for i=ll.ni-1,1,-1 do +--~ local di = d[i] +--~ if type(di) == "table" then +--~ collected[#collected+1] = di +--~ break +--~ end +--~ end +--~ end +--~ return collected + return { } end -functions.name = function(d,k,n) -- ns + tg - local found = false - n = n or 0 - if not k then - -- not found - elseif n == 0 then - local dk = d[k] - found = dk and (type(dk) == "table") and dk - elseif n < 0 then - for i=k-1,1,-1 do - local di = d[i] - if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end - end - end - else - for i=k+1,#d,1 do +apply_axis['following-sibling'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni+1,#d do local di = d[i] if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end + collected[#collected+1] = di end end end - if found then - local ns, tg = found.rn or found.ns or "", found.tg - if ns ~= "" then - return ns .. ":" .. 
tg - else - return tg - end - else - return "" - end + return collected end -functions.tag = function(d,k,n) -- only tg - local found = false - n = n or 0 - if not k then - -- not found - elseif n == 0 then - local dk = d[k] - found = dk and (type(dk) == "table") and dk - elseif n < 0 then - for i=k-1,1,-1 do +apply_axis['preceding-sibling'] = function(list) + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=1,ll.ni-1 do local di = d[i] if type(di) == "table" then - if n == -1 then - found = di - break - else - n = n + 1 - end + collected[#collected+1] = di end end - else - for i=k+1,#d,1 do + end + return collected +end + +apply_axis['reverse-sibling'] = function(list) -- reverse preceding + local collected = { } + for l=1,#list do + local ll = list[l] + local p = ll.__p__ + local d = p.dt + for i=ll.ni-1,1,-1 do local di = d[i] if type(di) == "table" then - if n == 1 then - found = di - break - else - n = n - 1 - end + collected[#collected+1] = di end end end - return (found and found.tg) or "" + return collected end -expressions.text = functions.text -expressions.name = functions.name -expressions.tag = functions.tag +apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self'] +apply_axis['auto-descendant'] = apply_axis['descendant'] +apply_axis['auto-child'] = apply_axis['child'] +apply_axis['auto-self'] = apply_axis['self'] +apply_axis['initial-child'] = apply_axis['child'] -local function traverse(root,pattern,handle,reverse,index,parent,wildcard) -- multiple only for tags, not for namespaces - if not root then -- error - return false - elseif pattern == false then -- root - handle(root,root.dt,root.ri) - return false - elseif pattern == true then -- wildcard - local rootdt = root.dt - if rootdt then - local start, stop, step = 1, #rootdt, 1 - if reverse then - start, stop, step = stop, start, -1 - end - for k=start,stop,step do - if handle(root,rootdt,root.ri or k) then return false end - if not traverse(rootdt[k],true,handle,reverse) then return false end - end - end - return false - elseif root.dt then - index = index or 1 - local action = pattern[index] - local command = action[1] - if command == 29 then -- fast case /oeps - local rootdt = root.dt - for k=1,#rootdt do - local e = rootdt[k] - local tg = e.tg - if e.tg then - local ns = e.rn or e.ns - local ns_a, tg_a = action[3], action[4] - local matched = (ns_a == "*" or ns == ns_a) and (tg_a == "*" or tg == tg_a) - if not action[2] then matched = not matched end - if matched then - if handle(root,rootdt,k) then return false end - end - end - end - elseif command == 11 then -- parent - local ep = root.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false +local function apply_nodes(list,directive,nodes) + -- todo: nodes[1] etc ... negated node name in set ... when needed + -- ... currently ignored + local maxn = #nodes + if maxn == 3 then --optimized loop + local nns, ntg = nodes[2], nodes[3] + if not nns and not ntg then -- wildcard + if directive then + return list + else + return { } end else - if (command == 16 or command == 12) and index == 1 then -- initial - -- wildcard = true - wildcard = command == 16 -- ok? 
- index = index + 1 - action = pattern[index] - command = action and action[1] or 0 -- something is wrong - end - if command == 11 then -- parent - local ep = root.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false - end - else - local rootdt = root.dt - local start, stop, step, n, dn = 1, #rootdt, 1, 0, 1 - if command == 30 then - if action[5] < 0 then - start, stop, step = stop, start, -1 - dn = -1 - end - elseif reverse and index == #pattern then - start, stop, step = stop, start, -1 - end - local idx = 0 - local hsh = { } -- this will slooow down the lot - for k=start,stop,step do -- we used to have functions for all but a case is faster - local e = rootdt[k] - local ns, tg = e.rn or e.ns, e.tg - if tg then - -- we can optimize this for simple searches, but it probably does not pay off - hsh[tg] = (hsh[tg] or 0) + 1 - idx = idx + 1 - if command == 30 then - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false + local collected, m, p = { }, 0, nil + if not nns then -- only check tag + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + if directive then + if ntg == ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - if not action[2] then matched = not matched end - if matched then - n = n + dn - if n == action[5] then - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - break - end - elseif wildcard then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end - end - else - local matched, multiple = false, false - if command == 20 then -- match - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - elseif command == 21 then -- match one of - multiple = true - for i=3,#action,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - elseif command == 22 then -- eq - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - matched = matched and e.at[action[6]] == action[7] - elseif command == 23 then -- ne - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = mached and e.at[action[6]] ~= action[7] - elseif command == 24 then -- one of eq - multiple = true - for i=3,#action-2,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[#action-1]] == action[#action] - elseif 
command == 25 then -- one of ne - multiple = true - for i=3,#action-2,2 do - local ns_a, tg_a = action[i], action[i+1] - if (ns_a == "*" or ns == ns_a) and (tg == "*" or tg == tg_a) then - matched = true - break - end - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[#action-1]] ~= action[#action] - elseif command == 27 then -- has attribute - local ns_a, tg_a = action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = matched and e.at[action[5]] - elseif command == 28 then -- has value - local edt, ns_a, tg_a = e.dt, action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - matched = matched and edt and edt[1] == action[5] - elseif command == 31 then - local edt, ns_a, tg_a = e.dt, action[3], action[4] - if tg == tg_a then - matched = ns_a == "*" or ns == ns_a - elseif tg_a == '*' then - matched, multiple = ns_a == "*" or ns == ns_a, true - else - matched = false - end - if not action[2] then matched = not matched end - if matched then - matched = action[6](expressions,root,rootdt,k,e,edt,ns,tg,idx,hsh[tg] or 1) - end - end - if matched then -- combine tg test and at test - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - if wildcard then - if multiple then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end - else - -- maybe or multiple; anyhow, check on (section|title) vs just section and title in example in lxml - if not traverse(e,pattern,handle,reverse,index,root) then return false end - end - end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - elseif command == 14 then -- any - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root) then return false end - end - elseif command == 15 then -- many - if index == #pattern then - if handle(root,rootdt,root.ri or k) then return false end - else - if not traverse(e,pattern,handle,reverse,index+1,root,true) then return false end - end - -- not here : 11 - elseif command == 11 then -- parent - local ep = e.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,root,index+1) then return false end - elseif handle(root,rootdt,k) then - return false - end - elseif command == 40 and e.special and tg == "@pi@" then -- pi - local pi = action[2] - if pi ~= "" then - local pt = e.dt[1] - if pt and pt:find(pi) then - if handle(root,rootdt,k) then - return false - end - end - elseif handle(root,rootdt,k) then - return false - end - elseif wildcard then - if not traverse(e,pattern,handle,reverse,index,root,true) then return false end + elseif ntg ~= ltg then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + end + end + elseif not ntg then -- only check namespace + for l=1,#list do + local ll = list[l] + local lns = ll.rn or ll.ns + if lns then + if directive then + if lns == nns then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end + elseif lns ~= nns then + 
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - else - -- not here : 11 - if command == 11 then -- parent - local ep = e.__p__ or parent - if index < #pattern then - if not traverse(ep,pattern,handle,reverse,index+1,root) then return false end - elseif handle(root,rootdt,k) then - return false + end + end + else -- check both + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = ltg == ntg and lns == nns + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end - break -- else loop + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m end end end end + return collected + end + else + local collected, m, p = { }, 0, nil + for l=1,#list do + local ll = list[l] + local ltg = ll.tg + if ltg then + local lns = ll.rn or ll.ns + local ok = false + for n=1,maxn,3 do + local nns, ntg = nodes[n+1], nodes[n+2] + ok = (not ntg or ltg == ntg) and (not nns or lns == nns) + if ok then + break + end + end + if directive then + if ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + elseif not ok then + local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end + collected[#collected+1], ll.mi = ll, m + end + end end + return collected end - return true end -xml.traverse = traverse +local quit_expression = false ---[[ldx-- -

Next come all kind of locators and manipulators. The most generic function here -is xml.filter(root,pattern). All registers functions in the filters namespace -can be path of a search path, as in:

+local function apply_expression(list,expression,order) + local collected = { } + quit_expression = false + for l=1,#list do + local ll = list[l] + if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1 + collected[#collected+1] = ll + end + if quit_expression then + break + end + end + return collected +end + +local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb + +local spaces = S(" \n\r\t\f")^0 +local lp_space = S(" \n\r\t\f") +local lp_any = P(1) +local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==") +local lp_doequal = P("=") / "==" +local lp_or = P("|") / " or " +local lp_and = P("&") / " and " + +local lp_builtin = P ( + P("firstindex") / "1" + + P("lastindex") / "(#ll.__p__.dt or 1)" + + P("firstelement") / "1" + + P("lastelement") / "(ll.__p__.en or 1)" + + P("first") / "1" + + P("last") / "#list" + + P("rootposition") / "order" + + P("position") / "l" + -- is element in finalizer + P("order") / "order" + + P("element") / "(ll.ei or 1)" + + P("index") / "(ll.ni or 1)" + + P("match") / "(ll.mi or 1)" + + P("text") / "(ll.dt[1] or '')" + + -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" + + P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" + + P("tag") / "ll.tg" + + P("ns") / "ll.ns" + ) * ((spaces * P("(") * spaces * P(")"))/"") + +local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])") +local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end) +local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end) +local lp_fastpos = lp_fastpos_n + lp_fastpos_p +local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false") + +local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling + return t .. "(" +end - -local r, d, k = xml.filter(root,"/a/b/c/position(4)" - ---ldx]]-- +local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling + if expressions[t] then + return "expr." .. t .. 
"(" + else + return "expr.error(" + end +end -local traverse, lpath, convert = xml.traverse, xml.lpath, xml.convert +local lparent = lpeg.P("(") +local rparent = lpeg.P(")") +local noparent = 1 - (lparent+rparent) +local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent} +local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"} -xml.filters = { } +local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')") +local lp_number = S("+-") * R("09")^1 +local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'") +local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"')) -function xml.filters.default(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end) - return dt and dt[dk], rt, dt, dk -end +local cleaner -function xml.filters.attributes(root,pattern,arguments) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end) - local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at) - if ekat then - if arguments then - return ekat[arguments] or "", rt, dt, dk +local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s) + if expressions[t] then + s = s and s ~= "" and lpegmatch(cleaner,s) + if s and s ~= "" then + return "expr." .. t .. "(ll," .. s ..")" else - return ekat, rt, dt, dk + return "expr." .. t .. "(ll)" end else - return { }, rt, dt, dk + return "expr.error(" .. t .. ")" end end -function xml.filters.reverse(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse') - return dt and dt[dk], rt, dt, dk -end +local content = + lp_builtin + + lp_attribute + + lp_special + + lp_noequal + lp_doequal + + lp_or + lp_and + + lp_reserved + + lp_lua_function + lp_function + + lp_content + -- too fragile + lp_child + + lp_any + +local converter = Cs ( + lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0 +) -function xml.filters.count(root,pattern,everything) - local n = 0 - traverse(root, lpath(pattern), function(r,d,t) - if everything or type(d[t]) == "table" then - n = n + 1 - end - end) - return n -end +cleaner = Cs ( ( +--~ lp_fastpos + + lp_reserved + + lp_number + + lp_string + +1 )^1 ) -function xml.filters.elements(root, pattern) -- == all - local t = { } - traverse(root, lpath(pattern), function(r,d,k) - local e = d[k] - if e then - t[#t+1] = e - end - end) - return t -end -function xml.filters.texts(root, pattern) - local t = { } - traverse(root, lpath(pattern), function(r,d,k) - local e = d[k] - if e and e.dt then - t[#t+1] = e.dt - end - end) - return t -end +--~ expr -function xml.filters.first(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end) - return dt and dt[dk], rt, dt, dk -end +local template_e = [[ + local expr = xml.expressions + return function(list,ll,l,order) + return %s + end +]] -function xml.filters.last(root,pattern) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt,dt,dk = r,d,k return true end, 'reverse') - return dt and dt[dk], rt, dt, dk -end +local template_f_y = [[ + local finalizer = xml.finalizers['%s']['%s'] + return function(collection) + return finalizer(collection,%s) + end +]] -function xml.filters.index(root,pattern,arguments) - local rt, dt, dk, reverse, i = nil, nil, nil, false, tonumber(arguments or '1') or 1 - if i and i ~= 0 then 
- if i < 0 then - reverse, i = true, -i - end - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk, i = r, d, k, i-1 return i == 0 end, reverse) - if i == 0 then - return dt and dt[dk], rt, dt, dk - end +local template_f_n = [[ + return xml.finalizers['%s']['%s'] +]] + +-- + +local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] } +local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] } +local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] } +local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] } +local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] } +local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] } +local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] } +local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] } +local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] } +local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] } +local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] } +local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] } +local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] } +local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] } +local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] } + +local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] } +local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] } +local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] } +local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] } + +local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] } + +local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } } + +local skip = { } + +local function errorrunner_e(str,cnv) + if not skip[str] then + logs.report("lpath","error in expression: %s => %s",str,cnv) + skip[str] = cnv or str end - return nil, nil, nil, nil + return false +end +local function errorrunner_f(str,arg) + logs.report("lpath","error in finalizer: %s(%s)",str,arg or "") + return false end -function xml.filters.attribute(root,pattern,arguments) - local rt, dt, dk - traverse(root, lpath(pattern), function(r,d,k) rt, dt, dk = r, d, k return true end) - local ekat = (dt and dt[dk] and dt[dk].at) or (rt and rt.at) - return (ekat and (ekat[arguments] or ekat[gsub(arguments,"^([\"\'])(.*)%1$","%2")])) or "" +local function register_nodes(nodetest,nodes) + return { kind = "nodes", nodetest = nodetest, nodes = nodes } end -function xml.filters.text(root,pattern,arguments) -- ?? 
why index, tostring slow - local dtk, rt, dt, dk = xml.filters.index(root,pattern,arguments) - if dtk then -- n - local dtkdt = dtk.dt - if not dtkdt then - return "", rt, dt, dk - elseif #dtkdt == 1 and type(dtkdt[1]) == "string" then - return dtkdt[1], rt, dt, dk - else - return xml.tostring(dtkdt), rt, dt, dk - end +local function register_expression(expression) + local converted = lpegmatch(converter,expression) + local runner = loadstring(format(template_e,converted)) + runner = (runner and runner()) or function() errorrunner_e(expression,converted) end + return { kind = "expression", expression = expression, converted = converted, evaluator = runner } +end + +local function register_finalizer(protocol,name,arguments) + local runner + if arguments and arguments ~= "" then + runner = loadstring(format(template_f_y,protocol or xml.defaultprotocol,name,arguments)) else - return "", rt, dt, dk + runner = loadstring(format(template_f_n,protocol or xml.defaultprotocol,name)) end + runner = (runner and runner()) or function() errorrunner_f(name,arguments) end + return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner } end -function xml.filters.tag(root,pattern,n) - local tag = "" - traverse(root, lpath(pattern), function(r,d,k) - tag = xml.functions.tag(d,k,n and tonumber(n)) - return true - end) - return tag -end +local expression = P { "ex", + ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]", + sq = "'" * (1 - S("'"))^0 * "'", + dq = '"' * (1 - S('"'))^0 * '"', +} -function xml.filters.name(root,pattern,n) - local tag = "" - traverse(root, lpath(pattern), function(r,d,k) - tag = xml.functions.name(d,k,n and tonumber(n)) - return true - end) - return tag +local arguments = P { "ar", + ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")", + nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end, + sq = P("'") * (1 - P("'"))^0 * P("'"), + dq = P('"') * (1 - P('"'))^0 * P('"'), +} + +-- todo: better arg parser + +local function register_error(str) + return { kind = "error", error = format("unparsed: %s",str) } end ---[[ldx-- -

For splitting the filter function from the path specification, we can -use string matching or lpeg matching. Here the difference in speed is -neglectable but the lpeg variant is more robust.

---ldx]]-- +-- there is a difference in * and /*/ and so we need to catch a few special cases + +local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed +local special_2 = P("/") * Cc(register_auto_self) +local special_3 = P("") * Cc(register_auto_self) + +local parser = Ct { "patterns", -- can be made a bit faster by moving pattern outside + + patterns = spaces * V("protocol") * spaces * ( + ( V("special") * spaces * P(-1) ) + + ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 ) + ), + + protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"), + + -- the / is needed for // as descendant or self is somewhat special + -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0, + + axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") + + V("descendant_or_self") + V("following_sibling") + V("following") + + V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") + + #(1-P(-1)) * Cc(register_auto_child), + + special = special_1 + special_2 + special_3, --- not faster but hipper ... although ... i can't get rid of the trailing / in the path + initial = (P("/") * spaces * Cc(register_initial_child))^-1, -local P, S, R, C, V, Cc = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc + error = (P(1)^1) / register_error, -local slash = P('/') -local name = (R("az","AZ","--","__"))^1 -local path = C(((1-slash)^0 * slash)^1) -local argument = P { "(" * C(((1 - S("()")) + V(1))^0) * ")" } -local action = Cc(1) * path * C(name) * argument -local attribute = Cc(2) * path * P('@') * C(name) -local direct = Cc(3) * Cc("../*") * slash^0 * C(name) * argument + shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"), -local parser = direct + action + attribute + shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0, -local filters = xml.filters -local attribute_filter = xml.filters.attributes -local default_filter = xml.filters.default + s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus + -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self), + s_descendant = P("**") * Cc(register_descendant), + s_child = P("*") * #(1-P(":")) * Cc(register_child ), +-- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ), + s_parent = P("..") * Cc(register_parent ), + s_self = P("." 
) * Cc(register_self ), + s_root = P("^^") * Cc(register_root ), + s_ancestor = P("^") * Cc(register_ancestor ), --- todo: also hash, could be gc'd + descendant = P("descendant::") * Cc(register_descendant ), + child = P("child::") * Cc(register_child ), + parent = P("parent::") * Cc(register_parent ), + self = P("self::") * Cc(register_self ), + root = P('root::') * Cc(register_root ), + ancestor = P('ancestor::') * Cc(register_ancestor ), + descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ), + ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ), + -- attribute = P('attribute::') * Cc(register_attribute ), + -- namespace = P('namespace::') * Cc(register_namespace ), + following = P('following::') * Cc(register_following ), + following_sibling = P('following-sibling::') * Cc(register_following_sibling ), + preceding = P('preceding::') * Cc(register_preceding ), + preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ), + reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ), -function xml.filter(root,pattern) - local kind, a, b, c = parser:match(pattern) - if kind == 1 or kind == 3 then - return (filters[b] or default_filter)(root,a,c) - elseif kind == 2 then - return attribute_filter(root,a,b) + nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes, + + expressions = expression / register_expression, + + letters = R("az")^1, + name = (1-lpeg.S("/[]()|:*!"))^1, + negate = P("!") * Cc(false), + + nodefunction = V("negate") + P("not") * Cc(false) + Cc(true), + nodetest = V("negate") + Cc(true), + nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))), + wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")), + nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces, + + finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer, + +} + +local cache = { } + +local function nodesettostring(set,nodetest) + local t = { } + for i=1,#set,3 do + local directive, ns, tg = set[i], set[i+1], set[i+2] + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg) + t[#t+1] = (directive and tg) or format("not(%s)",tg) + end + if nodetest == false then + return format("not(%s)",concat(t,"|")) else - return default_filter(root,pattern) + return concat(t,"|") end end ---~ slightly faster, but first we need a proper test file ---~ ---~ local hash = { } ---~ ---~ function xml.filter(root,pattern) ---~ local h = hash[pattern] ---~ if not h then ---~ local kind, a, b, c = parser:match(pattern) ---~ if kind == 1 then ---~ h = { kind, filters[b] or default_filter, a, b, c } ---~ elseif kind == 2 then ---~ h = { kind, attribute_filter, a, b, c } ---~ else ---~ h = { kind, default_filter, a, b, c } ---~ end ---~ hash[pattern] = h ---~ end ---~ local kind = h[1] ---~ if kind == 1 then ---~ return h[2](root,h[2],h[4]) ---~ elseif kind == 2 then ---~ return h[2](root,h[2],h[3]) ---~ else ---~ return h[2](root,pattern) ---~ end ---~ end +local function tagstostring(list) + if #list == 0 then + return "no elements" + else + local t = { } + for i=1, #list do + local li = list[i] + local ns, tg = li.ns, li.tg + if not ns or ns == "" then ns = "*" end + if not tg or tg == "" then tg = "*" end + t[#t+1] = (tg == "@rt@" and 
"[root]") or format("%s:%s",ns,tg) + end + return concat(t," ") + end +end ---[[ldx-- -

The following functions collect elements and texts.

---ldx]]-- +xml.nodesettostring = nodesettostring --- still somewhat bugged +local parse_pattern -- we have a harmless kind of circular reference -function xml.collect_elements(root, pattern, ignorespaces) - local rr, dd = { }, { } - traverse(root, lpath(pattern), function(r,d,k) - local dk = d and d[k] - if dk then - if ignorespaces and type(dk) == "string" and dk:find("[^%S]") then - -- ignore - else - local n = #rr+1 - rr[n], dd[n] = r, dk - end - end - end) - return dd, rr +local function lshow(parsed) + if type(parsed) == "string" then + parsed = parse_pattern(parsed) + end + local s = table.serialize_functions -- ugly + table.serialize_functions = false -- ugly + logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false)) + table.serialize_functions = s -- ugly end -function xml.collect_texts(root, pattern, flatten) - local t = { } -- no r collector - traverse(root, lpath(pattern), function(r,d,k) - if d then - local ek = d[k] - local tx = ek and ek.dt - if flatten then - if tx then - t[#t+1] = xml.tostring(tx) or "" +xml.lshow = lshow + +local function add_comment(p,str) + local pc = p.comment + if not pc then + p.comment = { str } + else + pc[#pc+1] = str + end +end + +parse_pattern = function (pattern) -- the gain of caching is rather minimal + lpathcalls = lpathcalls + 1 + if type(pattern) == "table" then + return pattern + else + local parsed = cache[pattern] + if parsed then + lpathcached = lpathcached + 1 + else + parsed = lpegmatch(parser,pattern) + if parsed then + parsed.pattern = pattern + local np = #parsed + if np == 0 then + parsed = { pattern = pattern, register_self, state = "parsing error" } + logs.report("lpath","parsing error in '%s'",pattern) + lshow(parsed) else - t[#t+1] = "" + -- we could have done this with a more complex parser but this + -- is cleaner + local pi = parsed[1] + if pi.axis == "auto-child" then + if false then + add_comment(parsed, "auto-child replaced by auto-descendant-or-self") + parsed[1] = register_auto_descendant_or_self + else + add_comment(parsed, "auto-child replaced by auto-descendant") + parsed[1] = register_auto_descendant + end + elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then + add_comment(parsed, "initial-child removed") -- we could also make it a auto-self + remove(parsed,1) + end + local np = #parsed -- can have changed + if np > 1 then + local pnp = parsed[np] + if pnp.kind == "nodes" and pnp.nodetest == true then + local nodes = pnp.nodes + if nodes[1] == true and nodes[2] == false and nodes[3] == false then + add_comment(parsed, "redundant final wildcard filter removed") + remove(parsed,np) + end + end + end end else - t[#t+1] = tx or "" + parsed = { pattern = pattern } + end + cache[pattern] = parsed + if trace_lparse and not trace_lprofile then + lshow(parsed) end - else - t[#t+1] = "" end - end) - return t + return parsed + end end -function xml.collect_tags(root, pattern, nonamespace) - local t = { } - xml.traverse(root, xml.lpath(pattern), function(r,d,k) - local dk = d and d[k] - if dk and type(dk) == "table" then - local ns, tg = e.ns, e.tg - if nonamespace then - t[#t+1] = tg -- if needed we can return an extra table - elseif ns == "" then - t[#t+1] = tg - else - t[#t+1] = ns .. ":" .. 
tg +-- we can move all calls inline and then merge the trace back +-- technically we can combine axis and the next nodes which is +-- what we did before but this a bit cleaner (but slower too) +-- but interesting is that it's not that much faster when we +-- go inline +-- +-- beware: we need to return a collection even when we filter +-- else the (simple) cache gets messed up + +-- caching found lookups saves not that much (max .1 sec on a 8 sec run) +-- and it also messes up finalizers + +-- watch out: when there is a finalizer, it's always called as there +-- can be cases that a finalizer returns (or does) something in case +-- there is no match; an example of this is count() + +local profiled = { } xml.profiled = profiled + +local function profiled_apply(list,parsed,nofparsed,order) + local p = profiled[parsed.pattern] + if p then + p.tested = p.tested + 1 + else + p = { tested = 1, matched = 0, finalized = 0 } + profiled[parsed.pattern] = p + end + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + p.matched = p.matched + 1 + p.finalized = p.finalized + 1 + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + p.finalized = p.finalized + 1 + return collected end + return nil end - end) - return #t > 0 and {} + end + if collected then + p.matched = p.matched + 1 + end + return collected +end + +local function traced_apply(list,parsed,nofparsed,order) + if trace_lparse then + lshow(parsed) + end + logs.report("lpath", "collecting : %s",parsed.pattern) + logs.report("lpath", " root tags : %s",tagstostring(list)) + logs.report("lpath", " order : %s",order or "unset") + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + collected = apply_axis[pi.axis](collected) + logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis) + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest)) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted) + elseif kind == "finalizer" then + collected = pi.finalizer(collected) + logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "") + return collected + end + if not collected or #collected == 0 then + local pn = i < nofparsed and parsed[nofparsed] + if pn and pn.kind == "finalizer" then + collected = pn.finalizer(collected) + logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "") + return collected + end + return nil + end + end + return collected end ---[[ldx-- -

Often using an iterators looks nicer in the code than passing handler -functions. The book describes how to use coroutines for that -purpose (). This permits -code like:

- - -for r, d, k in xml.elements(xml.load('text.xml'),"title") do - print(d[k]) +local function normal_apply(list,parsed,nofparsed,order) + local collected = list + for i=1,nofparsed do + local pi = parsed[i] + local kind = pi.kind + if kind == "axis" then + local axis = pi.axis + if axis ~= "self" then + collected = apply_axis[axis](collected) + end + elseif kind == "nodes" then + collected = apply_nodes(collected,pi.nodetest,pi.nodes) + elseif kind == "expression" then + collected = apply_expression(collected,pi.evaluator,order) + elseif kind == "finalizer" then + return pi.finalizer(collected) + end + if not collected or #collected == 0 then + local pf = i < nofparsed and parsed[nofparsed].finalizer + if pf then + return pf(collected) -- can be anything + end + return nil + end + end + return collected end - -

Which will print all the titles in the document. The iterator variant takes -1.5 times the runtime of the function variant which is due to the overhead in -creating the wrapper. So, instead of:

- - -function xml.filters.first(root,pattern) - for rt,dt,dk in xml.elements(root,pattern) - return dt and dt[dk], rt, dt, dk +local function parse_apply(list,pattern) + -- we avoid an extra call + local parsed = cache[pattern] + if parsed then + lpathcalls = lpathcalls + 1 + lpathcached = lpathcached + 1 + elseif type(pattern) == "table" then + lpathcalls = lpathcalls + 1 + parsed = pattern + else + parsed = parse_pattern(pattern) or pattern + end + if not parsed then + return + end + local nofparsed = #parsed + if nofparsed == 0 then + return -- something is wrong + end + local one = list[1] + if not one then + return -- something is wrong + elseif not trace_lpath then + return normal_apply(list,parsed,nofparsed,one.mi) + elseif trace_lprofile then + return profiled_apply(list,parsed,nofparsed,one.mi) + else + return traced_apply(list,parsed,nofparsed,one.mi) end - return nil, nil, nil, nil end - - -

We use the function variants in the filters.

---ldx]]-- -local wrap, yield = coroutine.wrap, coroutine.yield +-- internal (parsed) -function xml.elements(root,pattern,reverse) - return wrap(function() traverse(root, lpath(pattern), yield, reverse) end) +expressions.child = function(e,pattern) + return parse_apply({ e },pattern) -- todo: cache end - -function xml.elements_only(root,pattern,reverse) - return wrap(function() traverse(root, lpath(pattern), function(r,d,k) yield(d[k]) end, reverse) end) +expressions.count = function(e,pattern) + local collected = parse_apply({ e },pattern) -- todo: cache + return (collected and #collected) or 0 end -function xml.each_element(root, pattern, handle, reverse) - local ok - traverse(root, lpath(pattern), function(r,d,k) ok = true handle(r,d,k) end, reverse) - return ok +-- external + +expressions.oneof = function(s,...) -- slow + local t = {...} for i=1,#t do if s == t[i] then return true end end return false +end +expressions.error = function(str) + xml.error_handler("unknown function in lpath expression",tostring(str or "?")) + return false +end +expressions.undefined = function(s) + return s == nil end -function xml.process_elements(root, pattern, handle) - traverse(root, lpath(pattern), function(r,d,k) - local dkdt = d[k].dt - if dkdt then - for i=1,#dkdt do - local v = dkdt[i] - if v.tg then handle(v) end - end - end - end) +expressions.quit = function(s) + if s or s == nil then + quit_expression = true + end + return true end -function xml.process_attributes(root, pattern, handle) - traverse(root, lpath(pattern), function(r,d,k) - local ek = d[k] - local a = ek.at or { } - handle(a) - if next(a) then -- next is faster than type (and >0 test) - ek.at = a - else - ek.at = nil - end - end) +expressions.print = function(...) + print(...) + return true end ---[[ldx-- -

We've now arrives at the functions that manipulate the tree.

---ldx]]-- +expressions.contains = find +expressions.find = find +expressions.upper = upper +expressions.lower = lower +expressions.number = tonumber +expressions.boolean = toboolean -function xml.inject_element(root, pattern, element, prepend) - if root and element then - local matches, collect = { }, nil - if type(element) == "string" then - element = convert(element,true) - end - if element then - collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end - traverse(root, lpath(pattern), collect) - for i=1,#matches do - local m = matches[i] - local r, d, k, element, edt = m[1], m[2], m[3], m[4], nil - if element.ri then - element = element.dt[element.ri].dt - else - element = element.dt - end - if r.ri then - edt = r.dt[r.ri].dt - else - edt = d and d[k] and d[k].dt - end - if edt then - local be, af - if prepend then - be, af = xml.copy(element), edt - else - be, af = edt, xml.copy(element) - end - for i=1,#af do - be[#be+1] = af[i] - end - if r.ri then - r.dt[r.ri].dt = be - else - d[k].dt = be - end - else - -- r.dt = element.dt -- todo - end - end +-- user interface + +local function traverse(root,pattern,handle) + logs.report("xml","use 'xml.selection' instead for '%s'",pattern) + local collected = parse_apply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + handle(r,r.dt,e.ni) end end end --- todo: copy ! - -function xml.insert_element(root, pattern, element, before) -- todo: element als functie - if root and element then - if pattern == "/" then - xml.inject_element(root, pattern, element, before) - else - local matches, collect = { }, nil - if type(element) == "string" then - element = convert(element,true) - end - if element and element.ri then - element = element.dt[element.ri] - end - if element then - collect = function(r,d,k) matches[#matches+1] = { r, d, k, element } end - traverse(root, lpath(pattern), collect) - for i=#matches,1,-1 do - local m = matches[i] - local r, d, k, element = m[1], m[2], m[3], m[4] - if not before then k = k + 1 end - if element.tg then - insert(d,k,element) -- untested ---~ elseif element.dt then ---~ for _,v in ipairs(element.dt) do -- i added ---~ insert(d,k,v) ---~ k = k + 1 ---~ end ---~ end - else - local edt = element.dt - if edt then - for i=1,#edt do - insert(d,k,edt[i]) - k = k + 1 - end - end - end - end +local function selection(root,pattern,handle) + local collected = parse_apply({ root },pattern) + if collected then + if handle then + for c=1,#collected do + handle(collected[c]) end + else + return collected end end end -xml.insert_element_after = xml.insert_element -xml.insert_element_before = function(r,p,e) xml.insert_element(r,p,e,true) end -xml.inject_element_after = xml.inject_element -xml.inject_element_before = function(r,p,e) xml.inject_element(r,p,e,true) end +xml.parse_parser = parser +xml.parse_pattern = parse_pattern +xml.parse_apply = parse_apply +xml.traverse = traverse -- old method, r, d, k +xml.selection = selection -- new method, simple handle -function xml.delete_element(root, pattern) - local matches, deleted = { }, { } - local collect = function(r,d,k) matches[#matches+1] = { r, d, k } end - traverse(root, lpath(pattern), collect) - for i=#matches,1,-1 do - local m = matches[i] - deleted[#deleted+1] = remove(m[2],m[3]) - end - return deleted +local lpath = parse_pattern + +xml.lpath = lpath + +function xml.cached_patterns() + return cache end -function xml.replace_element(root, pattern, element) - if type(element) == "string" then - 
element = convert(element,true) - end - if element and element.ri then - element = element.dt[element.ri] - end - if element then - traverse(root, lpath(pattern), function(rm, d, k) - d[k] = element.dt -- maybe not clever enough - end) +-- generic function finalizer (independant namespace) + +local function dofunction(collected,fnc) + if collected then + local f = functions[fnc] + if f then + for c=1,#collected do + f(collected[c]) + end + else + logs.report("xml","unknown function '%s'",fnc) + end end end -local function load_data(name) -- == io.loaddata - local f, data = io.open(name), "" - if f then - data = f:read("*all",'b') -- 'b' ? - f:close() - end - return data +xml.finalizers.xml["function"] = dofunction +xml.finalizers.tex["function"] = dofunction + +-- functions + +expressions.text = function(e,n) + local rdt = e.__p__.dt + return (rdt and rdt[n]) or "" end -function xml.include(xmldata,pattern,attribute,recursive,loaddata) - -- parse="text" (default: xml), encoding="" (todo) - -- attribute = attribute or 'href' - pattern = pattern or 'include' - loaddata = loaddata or load_data - local function include(r,d,k) - local ek, name = d[k], nil - if not attribute or attribute == "" then - local ekdt = ek.dt - name = (type(ekdt) == "table" and ekdt[1]) or ekdt - end - if not name then - if ek.at then - for a in gmatch(attribute or "href","([^|]+)") do - name = ek.at[a] - if name then break end +expressions.name = function(e,n) -- ns + tg + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = type(e) == "table" and e + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break + else + n = n + 1 end end end - local data = (name and name ~= "" and loaddata(name)) or "" - if data == "" then - xml.empty(d,k) - elseif ek.at["parse"] == "text" then -- for the moment hard coded - d[k] = xml.escaped(data) - else - local xi = xml.convert(data) - if not xi then - xml.empty(d,k) - else - if recursive then - xml.include(xi,pattern,attribute,recursive,loaddata) + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 end - xml.assign(d,k,xi) end end end - xml.each_element(xmldata, pattern, include) + if found then + local ns, tg = found.rn or found.ns or "", found.tg + if ns ~= "" then + return ns .. ":" .. tg + else + return tg + end + else + return "" + end end -function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space ! 
- traverse(root, lpath(pattern), function(r,d,k) - local dkdt = d[k].dt - if dkdt then -- can be optimized - local t = { } - for i=1,#dkdt do - local str = dkdt[i] - if type(str) == "string" then - - if str == "" then - -- stripped +expressions.tag = function(e,n) -- only tg + if not e then + return "" + else + local found = false + n = tonumber(n) or 0 + if n == 0 then + found = (type(e) == "table") and e -- seems to fail + elseif n < 0 then + local d, k = e.__p__.dt, e.ni + for i=k-1,1,-1 do + local di = d[i] + if type(di) == "table" then + if n == -1 then + found = di + break else - if nolines then - str = gsub(str,"[ \n\r\t]+"," ") - end - if str == "" then - -- stripped - else - t[#t+1] = str - end + n = n + 1 end - else - t[#t+1] = str end end - d[k].dt = t - end - end) -end - -local function rename_space(root, oldspace, newspace) -- fast variant - local ndt = #root.dt - for i=1,ndt or 0 do - local e = root[i] - if type(e) == "table" then - if e.ns == oldspace then - e.ns = newspace - if e.rn then - e.rn = newspace + else + local d, k = e.__p__.dt, e.ni + for i=k+1,#d,1 do + local di = d[i] + if type(di) == "table" then + if n == 1 then + found = di + break + else + n = n - 1 + end end end - local edt = e.dt - if edt then - rename_space(edt, oldspace, newspace) - end end + return (found and found.tg) or "" end end -xml.rename_space = rename_space +--[[ldx-- +

This is the main filter function. It returns whatever is asked for.
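For instance, assuming a small hand-made tree (the sample document and patterns below are illustrative only, they are not taken from this patch):

local root = xml.convert("<a><b>one</b><b>two</b></a>")
local bees = xml.filter(root,"a/b")         -- a collection of the matching <b> elements
local text = xml.filter(root,"a/b/text()")  -- the "text" finalizer determines what comes back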

+--ldx]]-- -function xml.remap_tag(root, pattern, newtg) - traverse(root, lpath(pattern), function(r,d,k) - d[k].tg = newtg - end) -end -function xml.remap_namespace(root, pattern, newns) - traverse(root, lpath(pattern), function(r,d,k) - d[k].ns = newns - end) +function xml.filter(root,pattern) -- no longer funny attribute handling here + return parse_apply({ root },pattern) end -function xml.check_namespace(root, pattern, newns) - traverse(root, lpath(pattern), function(r,d,k) - local dk = d[k] - if (not dk.rn or dk.rn == "") and dk.ns == "" then - dk.rn = newns - end - end) + +--[[ldx-- +

Often using an iterator looks nicer in the code than passing handler +functions. The book describes how to use coroutines for that +purpose (). This permits +code like:

+ + +for r, d, k in xml.elements(xml.load('text.xml'),"title") do + print(d[k]) -- old method end -function xml.remap_name(root, pattern, newtg, newns, newrn) - traverse(root, lpath(pattern), function(r,d,k) - local dk = d[k] - dk.tg = newtg - dk.ns = newns - dk.rn = newrn - end) +for e in xml.collected(xml.load('text.xml'),"title") do + print(e) -- new one end + +--ldx]]-- -function xml.filters.found(root,pattern,check_content) - local found = false - traverse(root, lpath(pattern), function(r,d,k) - if check_content then - local dk = d and d[k] - found = dk and dk.dt and next(dk.dt) and true +local wrap, yield = coroutine.wrap, coroutine.yield + +function xml.elements(root,pattern,reverse) -- r, d, k + local collected = parse_apply({ root },pattern) + if collected then + if reverse then + return wrap(function() for c=#collected,1,-1 do + local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) + end end) else - found = true + return wrap(function() for c=1,#collected do + local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni) + end end) end - return true - end) - return found + end + return wrap(function() end) end ---[[ldx-- -

Here are a few synonyms.

---ldx]]-- +function xml.collected(root,pattern,reverse) -- e + local collected = parse_apply({ root },pattern) + if collected then + if reverse then + return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end) + else + return wrap(function() for c=1,#collected do yield(collected[c]) end end) + end + end + return wrap(function() end) +end -xml.filters.position = xml.filters.index -xml.count = xml.filters.count -xml.index = xml.filters.index -xml.position = xml.filters.index -xml.first = xml.filters.first -xml.last = xml.filters.last -xml.found = xml.filters.found +end -- of closure -xml.each = xml.each_element -xml.process = xml.process_element -xml.strip = xml.strip_whitespace -xml.collect = xml.collect_elements -xml.all = xml.collect_elements +do -- create closure to overcome 200 locals limit -xml.insert = xml.insert_element_after -xml.inject = xml.inject_element_after -xml.after = xml.insert_element_after -xml.before = xml.insert_element_before -xml.delete = xml.delete_element -xml.replace = xml.replace_element +if not modules then modules = { } end modules ['lxml-mis'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local concat = table.concat +local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring +local format, gsub, match = string.format, string.gsub, string.match +local lpegmatch = lpeg.match --[[ldx-- -

The following helper functions best belong to the lmxl-ini +

The following helper functions best belong to the lxml-ini module. Some are here because we need them in the mk document and other manuals, others came up when playing with this module. Since this module is also used in we've put them here instead of loading more modules there than needed.
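As a sketch of one of the helpers defined below (the sample tree is illustrative, not part of this patch): xml.fillin only touches the first match, and when the check flag is given it only fills an element that is still empty.

local root = xml.convert("<doc><title/></doc>")
xml.fillin(root,"doc/title","no title",true)  -- the empty title becomes "no title"
xml.fillin(root,"doc/title","ignored",true)   -- now non-empty, so it is left alone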

--ldx]]-- -function xml.gsub(t,old,new) +local function xmlgsub(t,old,new) -- will be replaced local dt = t.dt if dt then for k=1,#dt do @@ -5069,28 +6366,26 @@ function xml.gsub(t,old,new) if type(v) == "string" then dt[k] = gsub(v,old,new) else - xml.gsub(v,old,new) + xmlgsub(v,old,new) end end end end +--~ xml.gsub = xmlgsub + function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual - if d and k and d[k-1] and type(d[k-1]) == "string" then - local s = d[k-1]:match("\n(%s+)") - xml.gsub(dk,"\n"..string.rep(" ",#s),"\n") + if d and k then + local dkm = d[k-1] + if dkm and type(dkm) == "string" then + local s = match(dkm,"\n(%s+)") + xmlgsub(dk,"\n"..rep(" ",#s),"\n") + end end end -function xml.serialize_path(root,lpath,handle) - local dk, r, d, k = xml.first(root,lpath) - dk = xml.copy(dk) - xml.strip_leading_spaces(dk,d,k) - xml.serialize(dk,handle) -end - --~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end +--~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end --~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end --~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end @@ -5114,8 +6409,6 @@ local escaped = Cs(normal * (special * normal)^0) -- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) --- unescaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) --- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0) local normal = (1 - S"&")^0 local special = P("<")/"<" + P(">")/">" + P("&")/"&" local unescaped = Cs(normal * (special * normal)^0) @@ -5124,84 +6417,32 @@ local unescaped = Cs(normal * (special * normal)^0) local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) -function xml.escaped (str) return escaped :match(str) end -function xml.unescaped(str) return unescaped:match(str) end -function xml.cleansed (str) return cleansed :match(str) end +xml.escaped_pattern = escaped +xml.unescaped_pattern = unescaped +xml.cleansed_pattern = cleansed -function xml.join(t,separator,lastseparator) - if #t > 0 then - local result = { } - for k,v in pairs(t) do - result[k] = xml.tostring(v) - end - if lastseparator then - return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. 
result[#result] - else - return concat(result,separator) +function xml.escaped (str) return lpegmatch(escaped,str) end +function xml.unescaped(str) return lpegmatch(unescaped,str) end +function xml.cleansed (str) return lpegmatch(cleansed,str) end + +-- this might move + +function xml.fillin(root,pattern,str,check) + local e = xml.first(root,pattern) + if e then + local n = #e.dt + if not check or n == 0 or (n == 1 and e.dt[1] == "") then + e.dt = { str } end - else - return "" end end -function xml.statistics() - return { - lpathcalls = lpathcalls, - lpathcached = lpathcached, - } -end - --- xml.set_text_cleanup(xml.show_text_entities) --- xml.set_text_cleanup(xml.resolve_text_entities) - ---~ xml.lshow("/../../../a/(b|c)[@d='e']/f") ---~ xml.lshow("/../../../a/!(b|c)[@d='e']/f") ---~ xml.lshow("/../../../a/!b[@d!='e']/f") - ---~ x = xml.convert([[ ---~ ---~ 01 ---~ 02 ---~ 03 ---~ OK ---~ 05 ---~ 06 ---~ ALSO OK ---~ ---~ ]]) - ---~ xml.settrace("lpath",true) - ---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == 'ok']")) ---~ xml.xshow(xml.first(x,"b[position() > 2 and position() < 5 and text() == upper('ok')]")) ---~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']")) ---~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]")) ---~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]")) - ---~ str = [[ ---~ ---~ ---~ my secret ---~ ---~ ]] - ---~ x = xml.convert([[ ---~ 0102xx03OK ---~ ]]) ---~ xml.xshow(xml.first(x,"b[tag(2) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(1) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(-1) == 'x']")) ---~ xml.xshow(xml.first(x,"b[tag(-2) == 'x']")) - ---~ print(xml.filter(x,"b/tag(2)")) ---~ print(xml.filter(x,"b/tag(1)")) - end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['lxml-ent'] = { +if not modules then modules = { } end modules ['lxml-aux'] = { version = 1.001, comment = "this module is the basis for the lxml-* ones", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", @@ -5209,457 +6450,836 @@ if not modules then modules = { } end modules ['lxml-ent'] = { license = "see context related readme files" } -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub, find = string.format, string.gsub, string.find -local utfchar = unicode.utf8.char +-- not all functions here make sense anymore vbut we keep them for +-- compatibility reasons ---[[ldx-- -

We provide (at least here) two entity handlers. The more extensive -resolver consults a hash first, tries to convert to next, -and finaly calls a handler when defines. When this all fails, the -original entity is returned.

---ldx]]-- +local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end) -xml.entities = xml.entities or { } -- xml.entity_handler == function +local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name +local xmlinheritedconvert = xml.inheritedconvert -function xml.entity_handler(e) - return format("[%s]",e) -end +local type = type +local insert, remove = table.insert, table.remove +local gmatch, gsub = string.gmatch, string.gsub -local function toutf(s) - return utfchar(tonumber(s,16)) +local function report(what,pattern,c,e) + logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern) end -local function utfize(root) - local d = root.dt - for k=1,#d do - local dk = d[k] - if type(dk) == "string" then - -- test prevents copying if no match - if find(dk,"&#x.-;") then - d[k] = gsub(dk,"&#x(.-);",toutf) +local function withelements(e,handle,depth) + if e and handle then + local edt = e.dt + if edt then + depth = depth or 0 + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + handle(e,depth) + withelements(e,handle,depth+1) + end end - else - utfize(dk) end end end -xml.utfize = utfize +xml.withelements = withelements -local function resolve(e) -- hex encoded always first, just to avoid mkii fallbacks - if find(e,"^#x") then - return utfchar(tonumber(e:sub(3),16)) - elseif find(e,"^#") then - return utfchar(tonumber(e:sub(2))) - else - local ee = xml.entities[e] -- we cannot shortcut this one (is reloaded) - if ee then - return ee - else - local h = xml.entity_handler - return (h and h(e)) or "&" .. e .. ";" +function xml.withelement(e,n,handle) -- slow + if e and n ~= 0 and handle then + local edt = e.dt + if edt then + if n > 0 then + for i=1,#edt do + local ei = edt[i] + if type(ei) == "table" then + if n == 1 then + handle(ei) + return + else + n = n - 1 + end + end + end + elseif n < 0 then + for i=#edt,1,-1 do + local ei = edt[i] + if type(ei) == "table" then + if n == -1 then + handle(ei) + return + else + n = n + 1 + end + end + end + end end end end -local function resolve_entities(root) - if not root.special or root.tg == "@rt@" then - local d = root.dt - for k=1,#d do - local dk = d[k] - if type(dk) == "string" then - if find(dk,"&.-;") then - d[k] = gsub(dk,"&(.-);",resolve) - end - else - resolve_entities(dk) +xml.elements_only = xml.collected + +function xml.each_element(root,pattern,handle,reverse) + local collected = xmlparseapply({ root },pattern) + if collected then + if reverse then + for c=#collected,1,-1 do + handle(collected[c]) + end + else + for c=1,#collected do + handle(collected[c]) end end + return collected end end -xml.resolve_entities = resolve_entities +xml.process_elements = xml.each_element -function xml.utfize_text(str) - if find(str,"&#") then - return (gsub(str,"&#x(.-);",toutf)) - else - return str +function xml.process_attributes(root,pattern,handle) + local collected = xmlparseapply({ root },pattern) + if collected and handle then + for c=1,#collected do + handle(collected[c].at) + end end + return collected end -function xml.resolve_text_entities(str) -- maybe an lpeg. maybe resolve inline - if find(str,"&") then - return (gsub(str,"&(.-);",resolve)) - else - return str - end +--[[ldx-- +

The following functions collect elements and texts.
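For example, given a small tree (again illustrative only, not part of this patch):

local root  = xml.convert("<a><b>one</b><c>two</c><b>three</b></a>")
local elms  = xml.collect_elements(root,"a/b")   -- the matching <b> elements
local texts = xml.collect_texts(root,"a/b",true) -- flattened to { "one", "three" }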

+--ldx]]-- + +-- are these still needed -> lxml-cmp.lua + +function xml.collect_elements(root, pattern) + return xmlparseapply({ root },pattern) end -function xml.show_text_entities(str) - if find(str,"&") then - return (gsub(str,"&(.-);","[%1]")) - else - return str +function xml.collect_texts(root, pattern, flatten) -- todo: variant with handle + local collected = xmlparseapply({ root },pattern) + if collected and flatten then + local xmltostring = xml.tostring + for c=1,#collected do + collected[c] = xmltostring(collected[c].dt) + end end + return collected or { } end --- experimental, this will be done differently - -function xml.merge_entities(root) - local documententities = root.entities - local allentities = xml.entities - if documententities then - for k, v in next, documententities do - allentities[k] = v +function xml.collect_tags(root, pattern, nonamespace) + local collected = xmlparseapply({ root },pattern) + if collected then + local t = { } + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace then + t[#t+1] = tg + elseif ns == "" then + t[#t+1] = tg + else + t[#t+1] = ns .. ":" .. tg + end end + return t end end +--[[ldx-- +

We've now arrived at the functions that manipulate the tree.
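A rough sketch of the manipulation helpers in action; string snippets are converted on the fly, and the document and patterns here are illustrative only:

local root = xml.convert("<a><b>old</b><c/></a>")
xml.delete_element(root,"a/c")                       -- drop every matching <c>
xml.replace_element(root,"a/b","<b>new</b>")         -- replace matches by the converted snippet
xml.insert_element_after(root,"a/b","<d>extra</d>")  -- add a sibling after each match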

+--ldx]]-- -end -- of closure +local no_root = { no_root = true } -do -- create closure to overcome 200 locals limit +function xml.redo_ni(d) + for k=1,#d do + local dk = d[k] + if type(dk) == "table" then + dk.ni = k + end + end +end -if not modules then modules = { } end modules ['lxml-mis'] = { - version = 1.001, - comment = "this module is the basis for the lxml-* ones", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} +local function xmltoelement(whatever,root) + if not whatever then + return nil + end + local element + if type(whatever) == "string" then + element = xmlinheritedconvert(whatever,root) + else + element = whatever -- we assume a table + end + if element.error then + return whatever -- string + end + if element then + --~ if element.ri then + --~ element = element.dt[element.ri].dt + --~ else + --~ element = element.dt + --~ end + end + return element +end -local concat = table.concat -local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring -local format, gsub = string.format, string.gsub +xml.toelement = xmltoelement ---[[ldx-- -

The following helper functions best belong to the lmxl-ini -module. Some are here because we need then in the mk -document and other manuals, others came up when playing with -this module. Since this module is also used in we've -put them here instead of loading mode modules there then needed.

---ldx]]-- +local function copiedelement(element,newparent) + if type(element) == "string" then + return element + else + element = xmlcopy(element).dt + if newparent and type(element) == "table" then + element.__p__ = newparent + end + return element + end +end -function xml.gsub(t,old,new) - local dt = t.dt - if dt then - for k=1,#dt do - local v = dt[k] - if type(v) == "string" then - dt[k] = gsub(v,old,new) - else - xml.gsub(v,old,new) +function xml.delete_element(root,pattern) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('deleting',pattern,c,e) + end + local d = p.dt + remove(d,e.ni) + xml.redo_ni(d) -- can be made faster and inlined end end end end -function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual - if d and k and d[k-1] and type(d[k-1]) == "string" then - local s = d[k-1]:match("\n(%s+)") - xml.gsub(dk,"\n"..string.rep(" ",#s),"\n") +function xml.replace_element(root,pattern,whatever) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local p = e.__p__ + if p then + if trace_manipulations then + report('replacing',pattern,c,e) + end + local d = p.dt + d[e.ni] = copiedelement(element,p) + xml.redo_ni(d) -- probably not needed + end + end end end -function xml.serialize_path(root,lpath,handle) - local dk, r, d, k = xml.first(root,lpath) - dk = xml.copy(dk) - xml.strip_leading_spaces(dk,d,k) - xml.serialize(dk,handle) +local function inject_element(root,pattern,whatever,prepend) + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + local d, k, rri = r.dt, e.ni, r.ri + local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt) + if edt then + local be, af + local cp = copiedelement(element,e) + if prepend then + be, af = cp, edt + else + be, af = edt, cp + end + for i=1,#af do + be[#be+1] = af[i] + end + if rri then + r.dt[rri].dt = be + else + d[k].dt = be + end + xml.redo_ni(d) + end + end + end end ---~ xml.escapes = { ['&'] = '&', ['<'] = '<', ['>'] = '>', ['"'] = '"' } ---~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end +local function insert_element(root,pattern,whatever,before) -- todo: element als functie + local element = root and xmltoelement(whatever,root) + local collected = element and xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + local r = e.__p__ + local d, k = r.dt, e.ni + if not before then + k = k + 1 + end + insert(d,k,copiedelement(element,r)) + xml.redo_ni(d) + end + end +end ---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end ---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end ---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>" +xml.insert_element = insert_element +xml.insert_element_after = insert_element +xml.insert_element_before = function(r,p,e) insert_element(r,p,e,true) end +xml.inject_element = inject_element +xml.inject_element_after = inject_element +xml.inject_element_before = function(r,p,e) inject_element(r,p,e,true) end -local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs +local function 
include(xmldata,pattern,attribute,recursive,loaddata) + -- parse="text" (default: xml), encoding="" (todo) + -- attribute = attribute or 'href' + pattern = pattern or 'include' + loaddata = loaddata or io.loaddata + local collected = xmlparseapply({ xmldata },pattern) + if collected then + for c=1,#collected do + local ek = collected[c] + local name = nil + local ekdt = ek.dt + local ekat = ek.at + local epdt = ek.__p__.dt + if not attribute or attribute == "" then + name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- ckeck, probably always tab or str + end + if not name then + for a in gmatch(attribute or "href","([^|]+)") do + name = ekat[a] + if name then break end + end + end + local data = (name and name ~= "" and loaddata(name)) or "" + if data == "" then + epdt[ek.ni] = "" -- xml.empty(d,k) + elseif ekat["parse"] == "text" then + -- for the moment hard coded + epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data) + else +--~ local settings = xmldata.settings +--~ settings.parent_root = xmldata -- to be tested +--~ local xi = xmlconvert(data,settings) + local xi = xmlinheritedconvert(data,xmldata) + if not xi then + epdt[ek.ni] = "" -- xml.empty(d,k) + else + if recursive then + include(xi,pattern,attribute,recursive,loaddata) + end + epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi) + end + end + end + end +end --- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg --- --- 1021:0335:0287:0247 +xml.include = include --- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ" --- --- 1559:0257:0288:0190 (last one suggested by roberto) +--~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away +--~ local collected = xmlparseapply({ xmldata },pattern) +--~ if collected then +--~ local xmltostring = xml.tostring +--~ for c=1,#collected do +--~ local e = collected[c] +--~ local data = manipulator(xmltostring(e)) +--~ if data == "" then +--~ epdt[e.ni] = "" +--~ else +--~ local xi = xmlinheritedconvert(data,xmldata) +--~ if not xi then +--~ epdt[e.ni] = "" +--~ else +--~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi) +--~ end +--~ end +--~ end +--~ end +--~ end --- escaped = Cs((S("<&>") / xml.escapes + 1)^0) --- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) -local normal = (1 - S("<&>"))^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local escaped = Cs(normal * (special * normal)^0) +--~ xml.manipulate = manipulate --- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto) +function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space ! 
+ local collected = xmlparseapply({ root },pattern) + if collected then + for i=1,#collected do + local e = collected[i] + local edt = e.dt + if edt then + local t = { } + for i=1,#edt do + local str = edt[i] + if type(str) == "string" then + if str == "" then + -- stripped + else + if nolines then + str = gsub(str,"[ \n\r\t]+"," ") + end + if str == "" then + -- stripped + else + t[#t+1] = str + end + end + else + --~ str.ni = i + t[#t+1] = str + end + end + e.dt = t + end + end + end +end --- unescaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0) --- unescaped = Cs((((P("&")/"") * (P("lt")/"<" + P("gt")/">" + P("amp")/"&") * (P(";")/"")) + 1)^0) -local normal = (1 - S"&")^0 -local special = P("<")/"<" + P(">")/">" + P("&")/"&" -local unescaped = Cs(normal * (special * normal)^0) +function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing + local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now + if collected then + for i=1,#collected do + local e = collected[i] + local edt = e.dt + if edt then + if anywhere then + local t = { } + for e=1,#edt do + local str = edt[e] + if type(str) ~= "string" then + t[#t+1] = str + elseif str ~= "" then + -- todo: lpeg for each case + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s*(.-)%s*$","%1") + if str ~= "" then + t[#t+1] = str + end + end + end + e.dt = t + else + -- we can assume a regular sparse xml table with no successive strings + -- otherwise we should use a while loop + if #edt > 0 then + -- strip front + local str = edt[1] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt,1) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s+","") + if str == "" then + remove(edt,1) + else + edt[1] = str + end + end + end + if #edt > 1 then + -- strip end + local str = edt[#edt] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"%s+$","") + if str == "" then + remove(edt) + else + edt[#edt] = str + end + end + end + end + end + end + end +end --- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference) +local function rename_space(root, oldspace, newspace) -- fast variant + local ndt = #root.dt + for i=1,ndt or 0 do + local e = root[i] + if type(e) == "table" then + if e.ns == oldspace then + e.ns = newspace + if e.rn then + e.rn = newspace + end + end + local edt = e.dt + if edt then + rename_space(edt, oldspace, newspace) + end + end + end +end -local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0) +xml.rename_space = rename_space -xml.escaped_pattern = escaped -xml.unescaped_pattern = unescaped -xml.cleansed_pattern = cleansed +function xml.remap_tag(root, pattern, newtg) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + collected[c].tg = newtg + end + end +end -function xml.escaped (str) return escaped :match(str) end -function xml.unescaped(str) return unescaped:match(str) end -function xml.cleansed (str) return cleansed :match(str) end +function xml.remap_namespace(root, pattern, newns) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + collected[c].ns = newns + end + end +end -function xml.join(t,separator,lastseparator) - if #t > 0 then - local result = { } - for k,v in pairs(t) do - result[k] = xml.tostring(v) +function 
xml.check_namespace(root, pattern, newns) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + if (not e.rn or e.rn == "") and e.ns == "" then + e.rn = newns + end end - if lastseparator then - return concat(result,separator or "",1,#result-1) .. (lastseparator or "") .. result[#result] - else - return concat(result,separator) + end +end + +function xml.remap_name(root, pattern, newtg, newns, newrn) + local collected = xmlparseapply({ root },pattern) + if collected then + for c=1,#collected do + local e = collected[c] + e.tg, e.ns, e.rn = newtg, newns, newrn end - else - return "" end end +--[[ldx-- +

Here are a few synonyms.

+--ldx]]-- + +xml.each = xml.each_element +xml.process = xml.process_element +xml.strip = xml.strip_whitespace +xml.collect = xml.collect_elements +xml.all = xml.collect_elements + +xml.insert = xml.insert_element_after +xml.inject = xml.inject_element_after +xml.after = xml.insert_element_after +xml.before = xml.insert_element_before +xml.delete = xml.delete_element +xml.replace = xml.replace_element + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['trac-tra'] = { +if not modules then modules = { } end modules ['lxml-xml'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "this module is the basis for the lxml-* ones", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } --- the tag is kind of generic and used for functions that are not --- bound to a variable, like node.new, node.copy etc (contrary to for instance --- node.has_attribute which is bound to a has_attribute local variable in mkiv) +local finalizers = xml.finalizers.xml +local xmlfilter = xml.filter -- we could inline this one for speed +local xmltostring = xml.tostring +local xmlserialize = xml.serialize -debugger = debugger or { } +local function first(collected) -- wrong ? + return collected and collected[1] +end -local counters = { } -local names = { } -local getinfo = debug.getinfo -local format, find, lower, gmatch = string.format, string.find, string.lower, string.gmatch +local function last(collected) + return collected and collected[#collected] +end --- one +local function all(collected) + return collected +end -local function hook() - local f = getinfo(2,"f").func - local n = getinfo(2,"Sn") --- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end - if f then - local cf = counters[f] - if cf == nil then - counters[f] = 1 - names[f] = n - else - counters[f] = cf + 1 +local function reverse(collected) + if collected then + local reversed = { } + for c=#collected,1,-1 do + reversed[#reversed+1] = collected[c] end + return reversed end end -local function getname(func) - local n = names[func] - if n then - if n.what == "C" then - return n.name or '' - else - -- source short_src linedefined what name namewhat nups func - local name = n.name or n.namewhat or n.what - if not name or name == "" then name = "?" 
end - return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name) - end - else - return "unknown" + +local function attribute(collected,name) + if collected and #collected > 0 then + local at = collected[1].at + return at and at[name] end end -function debugger.showstats(printer,threshold) - printer = printer or texio.write or print - threshold = threshold or 0 - local total, grandtotal, functions = 0, 0, 0 - printer("\n") -- ugly but ok - -- table.sort(counters) - for func, count in pairs(counters) do - if count > threshold then - local name = getname(func) - if not name:find("for generator") then - printer(format("%8i %s", count, name)) - total = total + count - end - end - grandtotal = grandtotal + count - functions = functions + 1 - end - printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) + +local function att(id,name) + local at = id.at + return at and at[name] end --- two +local function count(collected) + return (collected and #collected) or 0 +end ---~ local function hook() ---~ local n = getinfo(2) ---~ if n.what=="C" and not n.name then ---~ local f = tostring(debug.traceback()) ---~ local cf = counters[f] ---~ if cf == nil then ---~ counters[f] = 1 ---~ names[f] = n ---~ else ---~ counters[f] = cf + 1 ---~ end ---~ end ---~ end ---~ function debugger.showstats(printer,threshold) ---~ printer = printer or texio.write or print ---~ threshold = threshold or 0 ---~ local total, grandtotal, functions = 0, 0, 0 ---~ printer("\n") -- ugly but ok ---~ -- table.sort(counters) ---~ for func, count in pairs(counters) do ---~ if count > threshold then ---~ printer(format("%8i %s", count, func)) ---~ total = total + count ---~ end ---~ grandtotal = grandtotal + count ---~ functions = functions + 1 ---~ end ---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold)) ---~ end +local function position(collected,n) + if collected then + n = tonumber(n) or 0 + if n < 0 then + return collected[#collected + n + 1] + elseif n > 0 then + return collected[n] + else + return collected[1].mi or 0 + end + end +end --- rest +local function match(collected) + return (collected and collected[1].mi) or 0 -- match +end -function debugger.savestats(filename,threshold) - local f = io.open(filename,'w') - if f then - debugger.showstats(function(str) f:write(str) end,threshold) - f:close() +local function index(collected) + if collected then + return collected[1].ni end end -function debugger.enable() - debug.sethook(hook,"c") +local function attributes(collected,arguments) + if collected then + local at = collected[1].at + if arguments then + return at[arguments] + elseif next(at) then + return at -- all of them + end + end end -function debugger.disable() - debug.sethook() ---~ counters[debug.getinfo(2,"f").func] = nil +local function chainattribute(collected,arguments) -- todo: optional levels + if collected then + local e = collected[1] + while e do + local at = e.at + if at then + local a = at[arguments] + if a then + return a + end + else + break -- error + end + e = e.__p__ + end + end + return "" end -function debugger.tracing() - local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0 - if n > 0 then - function debugger.tracing() return true end ; return true +local function raw(collected) -- hybrid + if collected then + local e = collected[1] or collected + return (e and xmlserialize(e)) or "" -- only first 
as we cannot concat function else - function debugger.tracing() return false end ; return false + return "" end end ---~ debugger.enable() - ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) ---~ print(math.sin(1*.5)) - ---~ debugger.disable() - ---~ print("") ---~ debugger.showstats() ---~ print("") ---~ debugger.showstats(print,3) - -trackers = trackers or { } +local function text(collected) -- hybrid + if collected then + local e = collected[1] or collected + return (e and xmltostring(e.dt)) or "" + else + return "" + end +end -local data, done = { }, { } +local function texts(collected) + if collected then + local t = { } + for c=1,#collected do + local e = collection[c] + if e and e.dt then + t[#t+1] = e.dt + end + end + return t + end +end -local function set(what,value) - if type(what) == "string" then - what = aux.settings_to_array(what) +local function tag(collected,n) + if collected then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[#collected-n+1] + end + return c and c.tg end - for i=1,#what do - local w = what[i] - for d, f in next, data do - if done[d] then - -- prevent recursion due to wildcards - elseif find(d,w) then - done[d] = true - for i=1,#f do - f[i](value) - end +end + +local function name(collected,n) + if collected then + local c + if n == 0 or not n then + c = collected[1] + elseif n > 1 then + c = collected[n] + else + c = collected[#collected-n+1] + end + if c then + if c.ns == "" then + return c.tg + else + return c.ns .. ":" .. c.tg end end end end -local function reset() - for d, f in next, data do - for i=1,#f do - f[i](false) +local function tags(collected,nonamespace) + if collected then + local t = { } + for c=1,#collected do + local e = collected[c] + local ns, tg = e.ns, e.tg + if nonamespace or ns == "" then + t[#t+1] = tg + else + t[#t+1] = ns .. ":" .. tg + end end + return t end end -function trackers.register(what,...) - what = lower(what) - local w = data[what] - if not w then - w = { } - data[what] = w - end - for _, fnc in next, { ... 
} do - local typ = type(fnc) - if typ == "function" then - w[#w+1] = fnc - elseif typ == "string" then - w[#w+1] = function(value) set(fnc,value,nesting) end +local function empty(collected) + if collected then + for c=1,#collected do + local e = collected[c] + if e then + local edt = e.dt + if edt then + local n = #edt + if n == 1 then + local edk = edt[1] + local typ = type(edk) + if typ == "table" then + return false + elseif edk ~= "" then -- maybe an extra tester for spacing only + return false + end + elseif n > 1 then + return false + end + end + end end end + return true end -function trackers.enable(what) - done = { } - set(what,true) +finalizers.first = first +finalizers.last = last +finalizers.all = all +finalizers.reverse = reverse +finalizers.elements = all +finalizers.default = all +finalizers.attribute = attribute +finalizers.att = att +finalizers.count = count +finalizers.position = position +finalizers.match = match +finalizers.index = index +finalizers.attributes = attributes +finalizers.chainattribute = chainattribute +finalizers.text = text +finalizers.texts = texts +finalizers.tag = tag +finalizers.name = name +finalizers.tags = tags +finalizers.empty = empty + +-- shortcuts -- we could support xmlfilter(id,pattern,first) + +function xml.first(id,pattern) + return first(xmlfilter(id,pattern)) end -function trackers.disable(what) - done = { } - if not what or what == "" then - trackers.reset(what) +function xml.last(id,pattern) + return last(xmlfilter(id,pattern)) +end + +function xml.count(id,pattern) + return count(xmlfilter(id,pattern)) +end + +function xml.attribute(id,pattern,a,default) + return attribute(xmlfilter(id,pattern),a,default) +end + +function xml.raw(id,pattern) + if pattern then + return raw(xmlfilter(id,pattern)) else - set(what,false) + return raw(id) end end -function trackers.reset(what) - done = { } - reset() +function xml.text(id,pattern) + if pattern then + -- return text(xmlfilter(id,pattern)) + local collected = xmlfilter(id,pattern) + return (collected and xmltostring(collected[1].dt)) or "" + elseif id then + -- return text(id) + return xmltostring(id.dt) or "" + else + return "" + end end -function trackers.list() -- pattern - local list = table.sortedkeys(data) - local user, system = { }, { } - for l=1,#list do - local what = list[l] - if find(what,"^%*") then - system[#system+1] = what - else - user[#user+1] = what - end - end - return user, system +xml.content = text + +function xml.position(id,pattern,n) -- element + return position(xmlfilter(id,pattern),n) +end + +function xml.match(id,pattern) -- number + return match(xmlfilter(id,pattern)) +end + +function xml.empty(id,pattern) + return empty(xmlfilter(id,pattern)) end +xml.all = xml.filter +xml.index = xml.position +xml.found = xml.filter + end -- of closure @@ -5667,7 +7287,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['luat-env'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -5679,10 +7299,10 @@ if not modules then modules = { } end modules ['luat-env'] = { -- evolved before bytecode arrays were available and so a lot of -- code has disappeared already. 
-local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local format = string.format +local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find +local unquote, quote = string.unquote, string.quote -- precautions @@ -5716,13 +7336,14 @@ if not environment.jobname then environ function environment.initialize_arguments(arg) local arguments, files = { }, { } environment.arguments, environment.files, environment.sortedflags = arguments, files, nil - for index, argument in pairs(arg) do + for index=1,#arg do + local argument = arg[index] if index > 0 then - local flag, value = argument:match("^%-+(.+)=(.-)$") + local flag, value = match(argument,"^%-+(.-)=(.-)$") if flag then - arguments[flag] = string.unquote(value or "") + arguments[flag] = unquote(value or "") else - flag = argument:match("^%-+(.+)") + flag = match(argument,"^%-+(.+)") if flag then arguments[flag] = true else @@ -5749,25 +7370,30 @@ function environment.argument(name,partial) return arguments[name] elseif partial then if not sortedflags then - sortedflags = { } - for _,v in pairs(table.sortedkeys(arguments)) do - sortedflags[#sortedflags+1] = "^" .. v + sortedflags = table.sortedkeys(arguments) + for k=1,#sortedflags do + sortedflags[k] = "^" .. sortedflags[k] end environment.sortedflags = sortedflags end -- example of potential clash: ^mode ^modefile - for _,v in ipairs(sortedflags) do - if name:find(v) then - return arguments[v:sub(2,#v)] + for k=1,#sortedflags do + local v = sortedflags[k] + if find(name,v) then + return arguments[sub(v,2,#v)] end end end return nil end +environment.argument("x",true) + function environment.split_arguments(separator) -- rather special, cut-off before separator local done, before, after = false, { }, { } - for _,v in ipairs(environment.original_arguments) do + local original_arguments = environment.original_arguments + for k=1,#original_arguments do + local v = original_arguments[k] if not done and v == separator then done = true elseif done then @@ -5784,16 +7410,17 @@ function environment.reconstruct_commandline(arg,noquote) if noquote and #arg == 1 then local a = arg[1] a = resolvers.resolve(a) - a = a:unquote() + a = unquote(a) return a - elseif next(arg) then + elseif #arg > 0 then local result = { } - for _,a in ipairs(arg) do -- ipairs 1 .. 
#n + for i=1,#arg do + local a = arg[i] a = resolvers.resolve(a) - a = a:unquote() - a = a:gsub('"','\\"') -- tricky - if a:find(" ") then - result[#result+1] = a:quote() + a = unquote(a) + a = gsub(a,'"','\\"') -- tricky + if find(a," ") then + result[#result+1] = quote(a) else result[#result+1] = a end @@ -5806,17 +7433,18 @@ end if arg then - -- new, reconstruct quoted snippets (maybe better just remnove the " then and add them later) + -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) local newarg, instring = { }, false - for index, argument in ipairs(arg) do - if argument:find("^\"") then - newarg[#newarg+1] = argument:gsub("^\"","") - if not argument:find("\"$") then + for index=1,#arg do + local argument = arg[index] + if find(argument,"^\"") then + newarg[#newarg+1] = gsub(argument,"^\"","") + if not find(argument,"\"$") then instring = true end - elseif argument:find("\"$") then - newarg[#newarg] = newarg[#newarg] .. " " .. argument:gsub("\"$","") + elseif find(argument,"\"$") then + newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","") instring = false elseif instring then newarg[#newarg] = newarg[#newarg] .. " " .. argument @@ -5871,12 +7499,12 @@ function environment.luafilechunk(filename) -- used for loading lua bytecode in filename = file.replacesuffix(filename, "lua") local fullname = environment.luafile(filename) if fullname and fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading file %s", fullname) end return environment.loadedluacode(fullname) else - if trace_verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end return nil @@ -5896,7 +7524,7 @@ function environment.loadluafile(filename, version) -- when not overloaded by explicit suffix we look for a luc file first local fullname = (lucname and environment.luafile(lucname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check @@ -5914,7 +7542,7 @@ function environment.loadluafile(filename, version) if v == version then return true else - if trace_verbose then + if trace_locating then logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version) end environment.loadluafile(filename) @@ -5925,12 +7553,12 @@ function environment.loadluafile(filename, version) end fullname = (luaname and environment.luafile(luaname)) or "" if fullname ~= "" then - if trace_verbose then + if trace_locating then logs.report("fileio","loading %s", fullname) end chunk = loadfile(fullname) -- this way we don't need a file exists check if not chunk then - if verbose then + if trace_locating then logs.report("fileio","unknown file %s", filename) end else @@ -5948,7 +7576,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['trac-inf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-inf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -5973,6 +7601,14 @@ function statistics.hastimer(instance) return instance and instance.starttime end +function statistics.resettiming(instance) + if not instance then + notimer = { timing = 0, loadtime = 0 } + else + instance.timing, instance.loadtime = 0, 0 + end +end + function statistics.starttiming(instance) if 
not instance then notimer = { } @@ -5987,6 +7623,8 @@ function statistics.starttiming(instance) if not instance.loadtime then instance.loadtime = 0 end + else +--~ logs.report("system","nested timing (%s)",tostring(instance)) end instance.timing = it + 1 end @@ -6032,6 +7670,12 @@ function statistics.elapsedindeed(instance) return t > statistics.threshold end +function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds + if statistics.elapsedindeed(instance) then + return format("%s seconds %s", statistics.elapsedtime(instance),rest or "") + end +end + -- general function function statistics.register(tag,fnc) @@ -6110,14 +7754,32 @@ function statistics.timed(action,report) report("total runtime: %s",statistics.elapsedtime(timer)) end +-- where, not really the best spot for this: + +commands = commands or { } + +local timer + +function commands.resettimer() + statistics.resettiming(timer) + statistics.starttiming(timer) +end + +function commands.elapsedtime() + statistics.stoptiming(timer) + tex.sprint(statistics.elapsedtime(timer)) +end + +commands.resettimer() + end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['luat-log'] = { +if not modules then modules = { } end modules ['trac-log'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to trac-log.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -6125,7 +7787,11 @@ if not modules then modules = { } end modules ['luat-log'] = { -- this is old code that needs an overhaul -local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format +--~ io.stdout:setvbuf("no") +--~ io.stderr:setvbuf("no") + +local write_nl, write = texio.write_nl or print, texio.write or io.write +local format, gmatch = string.format, string.gmatch local texcount = tex and tex.count if texlua then @@ -6206,25 +7872,48 @@ function logs.tex.line(fmt,...) 
-- new end end +--~ function logs.tex.start_page_number() +--~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +--~ if real > 0 then +--~ if user > 0 then +--~ if sub > 0 then +--~ write(format("[%s.%s.%s",real,user,sub)) +--~ else +--~ write(format("[%s.%s",real,user)) +--~ end +--~ else +--~ write(format("[%s",real)) +--~ end +--~ else +--~ write("[-") +--~ end +--~ end + +--~ function logs.tex.stop_page_number() +--~ write("]") +--~ end + +local real, user, sub + function logs.tex.start_page_number() - local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno + real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno +end + +function logs.tex.stop_page_number() if real > 0 then if user > 0 then if sub > 0 then - write(format("[%s.%s.%s",real,user,sub)) + logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub) else - write(format("[%s.%s",real,user)) + logs.report("pages", "flushing realpage %s, userpage %s",real,user) end else - write(format("[%s",real)) + logs.report("pages", "flushing realpage %s",real) end else - write("[-") + logs.report("pages", "flushing page") end -end - -function logs.tex.stop_page_number() - write("]") + io.flush() end logs.tex.report_job_stat = statistics.show_job_stat @@ -6324,7 +8013,7 @@ end function logs.setprogram(_name_,_banner_,_verbose_) name, banner = _name_, _banner_ if _verbose_ then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") end logs.set_method("tex") logs.report = report -- also used in libraries @@ -6337,9 +8026,9 @@ end function logs.setverbose(what) if what then - trackers.enable("resolvers.verbose") + trackers.enable("resolvers.locating") else - trackers.disable("resolvers.verbose") + trackers.disable("resolvers.locating") end logs.verbose = what or false end @@ -6356,7 +8045,7 @@ logs.report = logs.tex.report logs.simple = logs.tex.report function logs.reportlines(str) -- todo: - for line in str:gmatch("(.-)[\n\r]") do + for line in gmatch(str,"(.-)[\n\r]") do logs.report(line) end end @@ -6367,8 +8056,12 @@ end logs.simpleline = logs.reportline -function logs.help(message,option) +function logs.reportbanner() -- for scripts too logs.report(banner) +end + +function logs.help(message,option) + logs.reportbanner() logs.reportline() logs.reportlines(message) local moreinfo = logs.moreinfo or "" @@ -6400,6 +8093,11 @@ end --~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123") --~ end +function logs.fatal(where,...) 
+ logs.report(where,"fatal error: %s, aborting now",format(...)) + os.exit() +end + end -- of closure @@ -6407,10 +8105,10 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", - comment = "companion to luat-lib.tex", } -- After a few years using the code the large luat-inp.lua file @@ -6422,7 +8120,7 @@ if not modules then modules = { } end modules ['data-inp'] = { -- * some public auxiliary functions were made private -- -- TODO: os.getenv -> os.env[] --- TODO: instances.[hashes,cnffiles,configurations,522] -> ipairs (alles check, sneller) +-- TODO: instances.[hashes,cnffiles,configurations,522] -- TODO: check escaping in find etc, too much, too slow -- This lib is multi-purpose and can be loaded again later on so that @@ -6443,12 +8141,13 @@ if not modules then modules = { } end modules ['data-inp'] = { local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type = next, type +local lpegmatch = lpeg.match -local trace_locating, trace_detail, trace_verbose = false, false, false +local trace_locating, trace_detail, trace_expansions = false, false, false -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) -trackers.register("resolvers.detail", function(v) trace_detail = v trackers.enable("resolvers.verbose,resolvers.detail") end) +trackers.register("resolvers.locating", function(v) trace_locating = v end) +trackers.register("resolvers.details", function(v) trace_detail = v end) +trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo if not resolvers then resolvers = { @@ -6472,7 +8171,7 @@ resolvers.generators.notfound = { nil } resolvers.cacheversion = '1.0.1' resolvers.cnfname = 'texmf.cnf' resolvers.luaname = 'texmfcnf.lua' -resolvers.homedir = os.env[os.platform == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' +resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~' resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}' local dummy_path_expr = "^!*unset/*$" @@ -6514,8 +8213,8 @@ suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' } alternatives['map files'] = 'map' alternatives['enc files'] = 'enc' -alternatives['cid files'] = 'cid' -alternatives['fea files'] = 'fea' +alternatives['cid maps'] = 'cid' -- great, why no cid files +alternatives['font feature files'] = 'fea' -- and fea files here alternatives['opentype fonts'] = 'otf' alternatives['truetype fonts'] = 'ttf' alternatives['truetype collections'] = 'ttc' @@ -6531,6 +8230,11 @@ formats ['sfd'] = 'SFDFONTS' suffixes ['sfd'] = { 'sfd' } alternatives['subfont definition files'] = 'sfd' +-- lib paths + +formats ['lib'] = 'CLUAINPUTS' -- new (needs checking) +suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' } + -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. 
We used to pass the @@ -6653,8 +8357,10 @@ local function check_configuration() -- not yet ok, no time for debugging now -- bad luck end fix("LUAINPUTS" , ".;$TEXINPUTS;$TEXMFSCRIPTS") -- no progname, hm - fix("FONTFEATURES", ".;$TEXMF/fonts/fea//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") - fix("FONTCIDMAPS" , ".;$TEXMF/fonts/cid//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- this will go away some day + fix("FONTFEATURES", ".;$TEXMF/fonts/{data,fea}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + fix("FONTCIDMAPS" , ".;$TEXMF/fonts/{data,cid}//;$OPENTYPEFONTS;$TTFONTS;$T1FONTS;$AFMFONTS") + -- fix("LUATEXLIBS" , ".;$TEXMF/luatex/lua//") end @@ -6669,7 +8375,7 @@ function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' end end -resolvers.settrace(os.getenv("MTX.resolvers.TRACE") or os.getenv("MTX_INPUT_TRACE")) +resolvers.settrace(os.getenv("MTX_INPUT_TRACE")) function resolvers.osenv(key) local ie = instance.environment @@ -6757,37 +8463,43 @@ end -- work that well; the parsing is ok, but dealing with the resulting -- table is a pain because we need to work inside-out recursively +local function do_first(a,b) + local t = { } + for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end + return "{" .. concat(t,",") .. "}" +end + +local function do_second(a,b) + local t = { } + for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end + return "{" .. concat(t,",") .. "}" +end + +local function do_both(a,b) + local t = { } + for sa in gmatch(a,"[^,]+") do + for sb in gmatch(b,"[^,]+") do + t[#t+1] = sa .. sb + end + end + return "{" .. concat(t,",") .. "}" +end + +local function do_three(a,b,c) + return a .. b.. c +end + local function splitpathexpr(str, t, validate) -- no need for further optimization as it is only called a - -- few times, we can use lpeg for the sub; we could move - -- the local functions outside the body + -- few times, we can use lpeg for the sub + if trace_expansions then + logs.report("fileio","expanding variable '%s'",str) + end t = t or { } str = gsub(str,",}",",@}") str = gsub(str,"{,","{@,") -- str = "@" .. str .. "@" local ok, done - local function do_first(a,b) - local t = { } - for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end - return "{" .. concat(t,",") .. "}" - end - local function do_second(a,b) - local t = { } - for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end - return "{" .. concat(t,",") .. "}" - end - local function do_both(a,b) - local t = { } - for sa in gmatch(a,"[^,]+") do - for sb in gmatch(b,"[^,]+") do - t[#t+1] = sa .. sb - end - end - return "{" .. concat(t,",") .. "}" - end - local function do_three(a,b,c) - return a .. b.. c - end while true do done = false while true do @@ -6818,6 +8530,11 @@ local function splitpathexpr(str, t, validate) t[#t+1] = s end end + if trace_expansions then + for k=1,#t do + logs.report("fileio","% 4i: %s",k,t[k]) + end + end return t end @@ -6857,18 +8574,27 @@ end -- also we now follow the stupid route: if not set then just assume *one* -- cnf file under texmf (i.e. 
distribution) -resolvers.ownpath = resolvers.ownpath or nil -resolvers.ownbin = resolvers.ownbin or arg[-2] or arg[-1] or arg[0] or "luatex" -resolvers.autoselfdir = true -- false may be handy for debugging +local args = environment and environment.original_arguments or arg -- this needs a cleanup + +resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" +resolvers.ownbin = gsub(resolvers.ownbin,"\\","/") function resolvers.getownpath() - if not resolvers.ownpath then - if resolvers.autoselfdir and os.selfdir then - resolvers.ownpath = os.selfdir - else - local binary = resolvers.ownbin - if os.platform == "windows" then - binary = file.replacesuffix(binary,"exe") + local ownpath = resolvers.ownpath or os.selfdir + if not ownpath or ownpath == "" or ownpath == "unset" then + ownpath = args[-1] or arg[-1] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + if not ownpath or ownpath == "" then + ownpath = args[-0] or arg[-0] + ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/")) + end + local binary = resolvers.ownbin + if not ownpath or ownpath == "" then + ownpath = ownpath and file.dirname(binary) + end + if not ownpath or ownpath == "" then + if os.binsuffix ~= "" then + binary = file.replacesuffix(binary,os.binsuffix) end for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do local b = file.join(p,binary) @@ -6880,30 +8606,39 @@ function resolvers.getownpath() local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() - if trace_verbose and p ~= pp then - logs.report("fileio","following symlink %s to %s",p,pp) + if trace_locating and p ~= pp then + logs.report("fileio","following symlink '%s' to '%s'",p,pp) end - resolvers.ownpath = pp + ownpath = pp lfs.chdir(olddir) else - if trace_verbose then - logs.report("fileio","unable to check path %s",p) + if trace_locating then + logs.report("fileio","unable to check path '%s'",p) end - resolvers.ownpath = p + ownpath = p end break end end end - if not resolvers.ownpath then resolvers.ownpath = '.' end + if not ownpath or ownpath == "" then + ownpath = "." 
+ logs.report("fileio","forcing fallback ownpath .") + elseif trace_locating then + logs.report("fileio","using ownpath '%s'",ownpath) + end end - return resolvers.ownpath + resolvers.ownpath = ownpath + function resolvers.getownpath() + return resolvers.ownpath + end + return ownpath end local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" } local function identify_own() - local ownpath = resolvers.getownpath() or lfs.currentdir() + local ownpath = resolvers.getownpath() or dir.current() local ie = instance.environment if ownpath then if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end @@ -6916,10 +8651,10 @@ local function identify_own() if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end - if trace_verbose then + if trace_locating then for i=1,#own_places do local v = own_places[i] - logs.report("fileio","variable %s set to %s",v,resolvers.env(v) or "unknown") + logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown") end end identify_own = function() end @@ -6951,10 +8686,8 @@ end local function load_cnf_file(fname) fname = resolvers.clean_path(fname) local lname = file.replacesuffix(fname,'lua') - local f = io.open(lname) - if f then -- this will go - f:close() - local dname = file.dirname(fname) + if lfs.isfile(lname) then + local dname = file.dirname(fname) -- fname ? if not instance.configuration[dname] then resolvers.load_data(dname,'configuration',lname and file.basename(lname)) instance.order[#instance.order+1] = instance.configuration[dname] @@ -6962,8 +8695,8 @@ local function load_cnf_file(fname) else f = io.open(fname) if f then - if trace_verbose then - logs.report("fileio","loading %s", fname) + if trace_locating then + logs.report("fileio","loading configuration file %s", fname) end local line, data, n, k, v local dname = file.dirname(fname) @@ -6997,14 +8730,16 @@ local function load_cnf_file(fname) end end f:close() - elseif trace_verbose then - logs.report("fileio","skipping %s", fname) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'", fname) end end end local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared) - for _,c in ipairs(instance.order) do + local order = instance.order + for i=1,#order do + local c = order[i] for k,v in next, c do if not instance.variables[k] then if instance.environment[k] then @@ -7020,19 +8755,24 @@ end function resolvers.load_cnf() local function loadoldconfigdata() - for _, fname in ipairs(instance.cnffiles) do - load_cnf_file(fname) + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + load_cnf_file(cnffiles[i]) end end -- instance.cnffiles contain complete names now ! 
+ -- we still use a funny mix of cnf and new but soon + -- we will switch to lua exclusively as we only use + -- the file to collect the tree roots if #instance.cnffiles == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)") end else - instance.rootpath = instance.cnffiles[1] - for k,fname in ipairs(instance.cnffiles) do - instance.cnffiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local cnffiles = instance.cnffiles + instance.rootpath = cnffiles[1] + for k=1,#cnffiles do + instance.cnffiles[k] = file.collapse_path(cnffiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -7060,8 +8800,9 @@ function resolvers.load_lua() -- yet harmless else instance.rootpath = instance.luafiles[1] - for k,fname in ipairs(instance.luafiles) do - instance.luafiles[k] = file.collapse_path(gsub(fname,"\\",'/')) + local luafiles = instance.luafiles + for k=1,#luafiles do + instance.luafiles[k] = file.collapse_path(luafiles[k]) end for i=1,3 do instance.rootpath = file.dirname(instance.rootpath) @@ -7093,14 +8834,14 @@ end function resolvers.append_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash append: %s",tag) + logs.report("fileio","hash '%s' appended",tag) end insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } ) end function resolvers.prepend_hash(type,tag,name) if trace_locating then - logs.report("fileio","= hash prepend: %s",tag) + logs.report("fileio","hash '%s' prepended",tag) end insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } ) end @@ -7124,9 +8865,11 @@ end -- locators function resolvers.locatelists() - for _, path in ipairs(resolvers.clean_path_list('TEXMF')) do - if trace_verbose then - logs.report("fileio","locating list of %s",path) + local texmfpaths = resolvers.clean_path_list('TEXMF') + for i=1,#texmfpaths do + local path = texmfpaths[i] + if trace_locating then + logs.report("fileio","locating list of '%s'",path) end resolvers.locatedatabase(file.collapse_path(path)) end @@ -7139,11 +8882,11 @@ end function resolvers.locators.tex(specification) if specification and specification ~= '' and lfs.isdir(specification) then if trace_locating then - logs.report("fileio",'! tex locator found: %s',specification) + logs.report("fileio","tex locator '%s' found",specification) end resolvers.append_hash('file',specification,filename) elseif trace_locating then - logs.report("fileio",'? 
tex locator not found: %s',specification) + logs.report("fileio","tex locator '%s' not found",specification) end end @@ -7157,7 +8900,9 @@ function resolvers.loadfiles() instance.loaderror = false instance.files = { } if not instance.renewcache then - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for k=1,#hashes do + local hash = hashes[k] resolvers.hashdatabase(hash.tag,hash.name) if instance.loaderror then break end end @@ -7171,8 +8916,9 @@ end -- generators: function resolvers.loadlists() - for _, hash in ipairs(instance.hashes) do - resolvers.generatedatabase(hash.tag) + local hashes = instance.hashes + for i=1,#hashes do + resolvers.generatedatabase(hashes[i].tag) end end @@ -7184,10 +8930,27 @@ end local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +--~ local l_confusing = lpeg.P(" ") +--~ local l_character = lpeg.patterns.utf8 +--~ local l_dangerous = lpeg.P(".") + +--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1) +--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false) + +--~ local function test(str) +--~ print(str,lpeg.match(l_normal,str)) +--~ end +--~ test("ヒラギノ明朝 Pro W3") +--~ test("..ヒラギノ明朝 Pro W3") +--~ test(":ヒラギノ明朝 Pro W3;") +--~ test("ヒラギノ明朝 /Pro W3;") +--~ test("ヒラギノ明朝 Pro W3") + function resolvers.generators.tex(specification) local tag = specification - if trace_verbose then - logs.report("fileio","scanning path %s",specification) + if trace_locating then + logs.report("fileio","scanning path '%s'",specification) end instance.files[tag] = { } local files = instance.files[tag] @@ -7203,7 +8966,8 @@ function resolvers.generators.tex(specification) full = spec end for name in directory(full) do - if not weird:match(name) then + if not lpegmatch(weird,name) then + -- if lpegmatch(l_normal,name) then local mode = attributes(full..name,'mode') if mode == 'file' then if path then @@ -7236,7 +9000,7 @@ function resolvers.generators.tex(specification) end end action() - if trace_verbose then + if trace_locating then logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r) end end @@ -7251,11 +9015,48 @@ end -- we join them and split them after the expansion has taken place. This -- is more convenient. 
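The next hunk adds split_kpse_path, which splits a kpse-style path specification on ';' (plus ':' on non-windows platforms) with an lpeg splitter, drops '{unset}'-style placeholders and caches the result per input string; resolvers.split_path then falls back to it for plain strings. A rough sketch of the intended behaviour, assuming the lpeg.splitat helper provided by the l-lpeg code earlier in mtxrun.lua:

-- editor's sketch of the intended behaviour, not part of the patch
local parts = resolvers.split_kpse_path(".;$TEXMF/tex/{context,plain}//;!!$TEXMFLOCAL/tex//")
-- expected: a plain array, one entry per path component, e.g.
--   { ".", "$TEXMF/tex/{context,plain}//", "!!$TEXMFLOCAL/tex//" }
for i=1,#parts do
    print(i,parts[i])
end
-- with the resolvers.expansions tracker enabled the same split is also
-- reported via logs.report("fileio",...)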
+--~ local checkedsplit = string.checkedsplit + +local cache = { } + +local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;"))) + +local function split_kpse_path(str) -- beware, this can be either a path or a {specification} + local found = cache[str] + if not found then + if str == "" then + found = { } + else + str = gsub(str,"\\","/") +--~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator) +local split = lpegmatch(splitter,str) + found = { } + for i=1,#split do + local s = split[i] + if not find(s,"^{*unset}*") then + found[#found+1] = s + end + end + if trace_expansions then + logs.report("fileio","splitting path specification '%s'",str) + for k=1,#found do + logs.report("fileio","% 4i: %s",k,found[k]) + end + end + cache[str] = found + end + end + return found +end + +resolvers.split_kpse_path = split_kpse_path + function resolvers.splitconfig() - for i,c in ipairs(instance) do - for k,v in pairs(c) do + for i=1,#instance do + local c = instance[i] + for k,v in next, c do if type(v) == 'string' then - local t = file.split_path(v) + local t = split_kpse_path(v) if #t > 1 then c[k] = t end @@ -7265,21 +9066,25 @@ function resolvers.splitconfig() end function resolvers.joinconfig() - for i,c in ipairs(instance.order) do - for k,v in pairs(c) do -- ipairs? + local order = instance.order + for i=1,#order do + local c = order[i] + for k,v in next, c do -- indexed? if type(v) == 'table' then c[k] = file.join_path(v) end end end end + function resolvers.split_path(str) if type(str) == 'table' then return str else - return file.split_path(str) + return split_kpse_path(str) end end + function resolvers.join_path(str) if type(str) == 'table' then return file.join_path(str) @@ -7291,8 +9096,9 @@ end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h = { }, { } - for _,vv in ipairs(file.split_path(v)) do + local t, h, p = { }, { }, split_kpse_path(v) + for kk=1,#p do + local vv = p[kk] if vv ~= "" and not h[vv] then t[#t+1] = vv h[vv] = true @@ -7339,11 +9145,15 @@ function resolvers.serialize(files) end t[#t+1] = "return {" if instance.sortdata then - for _, k in pairs(sortedkeys(files)) do -- ipairs + local sortedfiles = sortedkeys(files) + for i=1,#sortedfiles do + local k = sortedfiles[i] local fk = files[k] if type(fk) == 'table' then t[#t+1] = "\t['" .. k .. "']={" - for _, kk in pairs(sortedkeys(fk)) do -- ipairs + local sortedfk = sortedkeys(fk) + for j=1,#sortedfk do + local kk = sortedfk[j] t[#t+1] = dump(kk,fk[kk],"\t\t") end t[#t+1] = "\t}," @@ -7368,12 +9178,18 @@ function resolvers.serialize(files) return concat(t,"\n") end +local data_state = { } + +function resolvers.data_state() + return data_state or { } +end + function resolvers.save_data(dataname, makename) -- untested without cache overload for cachename, files in next, instance[dataname] do local name = (makename or file.join)(cachename,dataname) local luaname, lucname = name .. ".lua", name .. 
".luc" - if trace_verbose then - logs.report("fileio","preparing %s for %s",dataname,cachename) + if trace_locating then + logs.report("fileio","preparing '%s' for '%s'",dataname,cachename) end for k, v in next, files do if type(v) == "table" and #v == 1 then @@ -7387,24 +9203,25 @@ function resolvers.save_data(dataname, makename) -- untested without cache overl date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = files, + uuid = os.uuid(), } local ok = io.savedata(luaname,resolvers.serialize(data)) if ok then - if trace_verbose then - logs.report("fileio","%s saved in %s",dataname,luaname) + if trace_locating then + logs.report("fileio","'%s' saved in '%s'",dataname,luaname) end if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip - if trace_verbose then - logs.report("fileio","%s compiled to %s",dataname,lucname) + if trace_locating then + logs.report("fileio","'%s' compiled to '%s'",dataname,lucname) end else - if trace_verbose then - logs.report("fileio","compiling failed for %s, deleting file %s",dataname,lucname) + if trace_locating then + logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end - elseif trace_verbose then - logs.report("fileio","unable to save %s in %s (access error)",dataname,luaname) + elseif trace_locating then + logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname) end end end @@ -7416,19 +9233,20 @@ function resolvers.load_data(pathname,dataname,filename,makename) -- untested wi if blob then local data = blob() if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then - if trace_verbose then - logs.report("fileio","loading %s for %s from %s",dataname,pathname,filename) + data_state[#data_state+1] = data.uuid + if trace_locating then + logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = data.content else - if trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + if trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end instance[dataname][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping %s for %s from %s",dataname,pathname,filename) + elseif trace_locating then + logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename) end end @@ -7447,15 +9265,17 @@ function resolvers.resetconfig() end function resolvers.loadnewconfig() - for _, cnf in ipairs(instance.luafiles) do + local luafiles = instance.luafiles + for i=1,#luafiles do + local cnf = luafiles[i] local pathname = file.dirname(cnf) local filename = file.join(pathname,resolvers.luaname) local blob = loadfile(filename) if blob then local data = blob() if data then - if trace_verbose then - logs.report("fileio","loading configuration file %s",filename) + if trace_locating then + logs.report("fileio","loading configuration file '%s'",filename) end if true then -- flatten to variable.progname @@ -7476,14 +9296,14 @@ function resolvers.loadnewconfig() instance['setup'][pathname] = data end else - if trace_verbose then - logs.report("fileio","skipping configuration file %s",filename) + if trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance['setup'][pathname] = { } instance.loaderror = true end - elseif trace_verbose then - logs.report("fileio","skipping 
configuration file %s",filename) + elseif trace_locating then + logs.report("fileio","skipping configuration file '%s'",filename) end instance.order[#instance.order+1] = instance.setup[pathname] if instance.loaderror then break end @@ -7492,7 +9312,9 @@ end function resolvers.loadoldconfig() if not instance.renewcache then - for _, cnf in ipairs(instance.cnffiles) do + local cnffiles = instance.cnffiles + for i=1,#cnffiles do + local cnf = cnffiles[i] local dname = file.dirname(cnf) resolvers.load_data(dname,'configuration') instance.order[#instance.order+1] = instance.configuration[dname] @@ -7682,7 +9504,7 @@ end function resolvers.expanded_path_list(str) if not str then - return ep or { } + return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") @@ -7700,9 +9522,9 @@ end function resolvers.expanded_path_list_from_var(str) -- brrr local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(str) - else return resolvers.expanded_path_list(tmp) + else + return resolvers.expanded_path_list(str) end end @@ -7749,9 +9571,9 @@ function resolvers.isreadable.file(name) local readable = lfs.isfile(name) -- brrr if trace_detail then if readable then - logs.report("fileio","+ readable: %s",name) + logs.report("fileio","file '%s' is readable",name) else - logs.report("fileio","- readable: %s", name) + logs.report("fileio","file '%s' is not readable", name) end end return readable @@ -7767,7 +9589,7 @@ local function collect_files(names) for k=1,#names do local fname = names[k] if trace_detail then - logs.report("fileio","? blobpath asked: %s",fname) + logs.report("fileio","checking name '%s'",fname) end local bname = file.basename(fname) local dname = file.dirname(fname) @@ -7783,7 +9605,7 @@ local function collect_files(names) local files = blobpath and instance.files[blobpath] if files then if trace_detail then - logs.report("fileio",'? blobpath do: %s (%s)',blobpath,bname) + logs.report("fileio","deep checking '%s' (%s)",blobpath,bname) end local blobfile = files[bname] if not blobfile then @@ -7817,7 +9639,7 @@ local function collect_files(names) end end elseif trace_locating then - logs.report("fileio",'! blobpath no: %s (%s)',blobpath,bname) + logs.report("fileio","no match in '%s' (%s)",blobpath,bname) end end end @@ -7867,14 +9689,13 @@ end local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc) local result = collected or { } local stamp = nil - filename = file.collapse_path(filename) -- elsewhere - filename = file.collapse_path(gsub(filename,"\\","/")) -- elsewhere + filename = file.collapse_path(filename) -- speed up / beware: format problem if instance.remember then stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format if instance.found[stamp] then if trace_locating then - logs.report("fileio",'! 
remembered: %s',filename) + logs.report("fileio","remembering file '%s'",filename) end return instance.found[stamp] end @@ -7882,7 +9703,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not dangerous[instance.format or "?"] then if resolvers.isreadable.file(filename) then if trace_detail then - logs.report("fileio",'= found directly: %s',filename) + logs.report("fileio","file '%s' found directly",filename) end instance.found[stamp] = { filename } return { filename } @@ -7890,13 +9711,13 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end if find(filename,'%*') then if trace_locating then - logs.report("fileio",'! wildcard: %s', filename) + logs.report("fileio","checking wildcard '%s'", filename) end result = resolvers.find_wildcard_files(filename) elseif file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then - logs.report("fileio",'! qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end result = { filename } else @@ -7906,7 +9727,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. ".tex" if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing standard filetype: tex') + logs.report("fileio","no suffix, forcing standard filetype 'tex'") end result, ok = { forcedname }, true end @@ -7916,7 +9737,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan forcedname = filename .. "." .. s if resolvers.isreadable.file(forcedname) then if trace_locating then - logs.report("fileio",'! no suffix, forcing format filetype: %s', s) + logs.report("fileio","no suffix, forcing format filetype '%s'", s) end result, ok = { forcedname }, true break @@ -7928,7 +9749,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- try to find in tree (no suffix manipulation), here we search for the -- matching last part of the name local basename = file.basename(filename) - local pattern = (filename .. "$"):gsub("([%.%-])","%%%1") + local pattern = gsub(filename .. 
"$","([%.%-])","%%%1") local savedformat = instance.format local format = savedformat or "" if format == "" then @@ -7938,19 +9759,21 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan instance.format = "othertextfiles" -- kind of everything, maybe texinput is better end -- - local resolved = collect_instance_files(basename) - if #result == 0 then - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered) + if basename ~= filename then + local resolved = collect_instance_files(basename) + if #result == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered) + end end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if rr:find(pattern) then - result[#result+1], ok = rr, true + resolvers.format = savedformat + -- + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1], ok = rr, true + end end end -- a real wildcard: @@ -7959,14 +9782,14 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan -- local filelist = collect_files({basename}) -- for f=1,#filelist do -- local ff = filelist[f][3] or "" - -- if ff:find(pattern) then + -- if find(ff,pattern) then -- result[#result+1], ok = ff, true -- end -- end -- end end if not ok and trace_locating then - logs.report("fileio",'? qualified: %s', filename) + logs.report("fileio","qualified name '%s'", filename) end end else @@ -7985,12 +9808,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan wantedfiles[#wantedfiles+1] = forcedname filetype = resolvers.format_of_suffix(forcedname) if trace_locating then - logs.report("fileio",'! forcing filetype: %s',filetype) + logs.report("fileio","forcing filetype '%s'",filetype) end else filetype = resolvers.format_of_suffix(filename) if trace_locating then - logs.report("fileio",'! using suffix based filetype: %s',filetype) + logs.report("fileio","using suffix based filetype '%s'",filetype) end end else @@ -8002,7 +9825,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan end filetype = instance.format if trace_locating then - logs.report("fileio",'! using given filetype: %s',filetype) + logs.report("fileio","using given filetype '%s'",filetype) end end local typespec = resolvers.variable_of_format(filetype) @@ -8010,9 +9833,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then - logs.report("fileio",'? filename: %s',filename) - logs.report("fileio",'? filetype: %s',filetype or '?') - logs.report("fileio",'? wanted files: %s',concat(wantedfiles," | ")) + logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end for k=1,#wantedfiles do local fname = wantedfiles[k] @@ -8033,36 +9854,59 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan else -- list search local filelist = collect_files(wantedfiles) - local doscan, recurse + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = file.dirname(filelist[i][2]) .. "/" + end + end if trace_detail then - logs.report("fileio",'? filename: %s',filename) + logs.report("fileio","checking filename '%s'",filename) end -- a bit messy ... 
esp the doscan setting here + local doscan for k=1,#pathlist do local path = pathlist[k] if find(path,"^!!") then doscan = false else doscan = true end - if find(path,"//$") then recurse = true else recurse = false end local pathname = gsub(path,"^!+", '') done = false -- using file list - if filelist and not (done and not instance.allresults) and recurse then - -- compare list entries with permitted pattern - pathname = gsub(pathname,"([%-%.])","%%%1") -- this also influences - pathname = gsub(pathname,"/+$", '/.*') -- later usage of pathname - pathname = gsub(pathname,"//", '/.-/') -- not ok for /// but harmless - local expr = "^" .. pathname + if filelist then + local expression + -- compare list entries with permitted pattern -- /xx /xx// + if not find(pathname,"/$") then + expression = pathname .. "/" + else + expression = pathname + end + expression = gsub(expression,"([%-%.])","%%%1") -- this also influences + expression = gsub(expression,"//+$", '/.*') -- later usage of pathname + expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless + expression = "^" .. expression .. "$" + if trace_detail then + logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname) + end for k=1,#filelist do local fl = filelist[k] local f = fl[2] - if find(f,expr) then - if trace_detail then - logs.report("fileio",'= found in hash: %s',f) - end + local d = dirlist[k] + if find(d,expression) then --- todo, test for readable result[#result+1] = fl[3] resolvers.register_in_trees(f) -- for tracing used files done = true - if not instance.allresults then break end + if instance.allresults then + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d) + end + else + if trace_detail then + logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d) + end + break + end + elseif trace_detail then + logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d) end end end @@ -8078,7 +9922,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local fname = file.join(ppname,w) if resolvers.isreadable.file(fname) then if trace_detail then - logs.report("fileio",'= found by scanning: %s',fname) + logs.report("fileio","found '%s' by scanning",fname) end result[#result+1] = fname done = true @@ -8141,7 +9985,7 @@ function resolvers.find_given_files(filename) local hashes = instance.hashes for k=1,#hashes do local hash = hashes[k] - local files = instance.files[hash.tag] + local files = instance.files[hash.tag] or { } local blist = files[bname] if not blist then local rname = "remap:"..bname @@ -8251,9 +10095,9 @@ function resolvers.load(option) statistics.starttiming(instance) resolvers.resetconfig() resolvers.identify_cnf() - resolvers.load_lua() + resolvers.load_lua() -- will become the new method resolvers.expand_variables() - resolvers.load_cnf() + resolvers.load_cnf() -- will be skipped when we have a lua file resolvers.expand_variables() if option ~= "nofiles" then resolvers.load_hash() @@ -8265,22 +10109,23 @@ end function resolvers.for_files(command, files, filetype, mustexist) if files and #files > 0 then local function report(str) - if trace_verbose then + if trace_locating then logs.report("fileio",str) -- has already verbose else print(str) end end - if trace_verbose then - report('') + if trace_locating then + report('') -- ? 
end - for _, file in ipairs(files) do + for f=1,#files do + local file = files[f] local result = command(file,filetype,mustexist) if type(result) == 'string' then report(result) else - for _,v in ipairs(result) do - report(v) + for i=1,#result do + report(result[i]) -- could be unpack end end end @@ -8327,18 +10172,19 @@ end function table.sequenced(t,sep) -- temp here local s = { } - for k, v in pairs(t) do -- pairs? - s[#s+1] = k .. "=" .. v + for k, v in next, t do -- indexed? + s[#s+1] = k .. "=" .. tostring(v) end return concat(s, sep or " | ") end function resolvers.methodhandler(what, filename, filetype) -- ... + filename = file.collapse_path(filename) local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb local scheme = specification.scheme if resolvers[what][scheme] then if trace_locating then - logs.report("fileio",'= handler: %s -> %s -> %s',specification.original,what,table.sequenced(specification)) + logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolvers[what][scheme](filename,filetype) -- todo: specification else @@ -8358,8 +10204,9 @@ function resolvers.clean_path(str) end function resolvers.do_with_path(name,func) - for _, v in pairs(resolvers.expanded_path_list(name)) do -- pairs? - func("^"..resolvers.clean_path(v)) + local pathlist = resolvers.expanded_path_list(name) + for i=1,#pathlist do + func("^"..resolvers.clean_path(pathlist[i])) end end @@ -8368,7 +10215,9 @@ function resolvers.do_with_var(name,func) end function resolvers.with_files(pattern,handle) - for _, hash in ipairs(instance.hashes) do + local hashes = instance.hashes + for i=1,#hashes do + local hash = hashes[i] local blobpath = hash.tag local blobtype = hash.type if blobpath then @@ -8383,7 +10232,7 @@ function resolvers.with_files(pattern,handle) if type(v) == "string" then handle(blobtype,blobpath,v,k) else - for _,vv in pairs(v) do -- ipairs? + for _,vv in next, v do -- indexed handle(blobtype,blobpath,vv,k) end end @@ -8395,7 +10244,7 @@ function resolvers.with_files(pattern,handle) end function resolvers.locate_format(name) - local barename, fmtname = name:gsub("%.%a+$",""), "" + local barename, fmtname = gsub(name,"%.%a+$",""), "" if resolvers.usecache then local path = file.join(caches.setpath("formats")) -- maybe platform fmtname = file.join(path,barename..".fmt") or "" @@ -8443,7 +10292,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8467,7 +10316,7 @@ luatools with a recache feature.
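Before the caches code below, a small illustration of table.sequenced, introduced a few hunks up as a temporary helper that flattens a table into 'key=value' pairs for the tracing message in resolvers.methodhandler; since it iterates with next, the pair order is not deterministic. Output shown in the comments is indicative only:

-- editor's sketch, not part of the patch
local spec = { scheme = "zip", original = "zip:///texmf.zip?tree=/tex/texmf" }
print(table.sequenced(spec))
-- something like: scheme=zip | original=zip:///texmf.zip?tree=/tex/texmf
print(table.sequenced(spec,", "))
-- same pairs, now joined with ", " instead of the default " | "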

local format, lower, gsub = string.format, string.lower, string.gsub -local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet caches = caches or { } @@ -8554,7 +10403,8 @@ function caches.setpath(...) caches.path = '.' end caches.path = resolvers.clean_path(caches.path) - if not table.is_empty({...}) then + local dirs = { ... } + if #dirs > 0 then local pth = dir.mkdirs(caches.path,...) return pth end @@ -8600,6 +10450,7 @@ function caches.savedata(filepath,filename,data,raw) if raw then reduce, simplify = false, false end + data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex else @@ -8625,7 +10476,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-res'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8660,6 +10511,14 @@ prefixes.relative = function(str,n) return resolvers.clean_path(str) end +prefixes.auto = function(str) + local fullname = prefixes.relative(str) + if not lfs.isfile(fullname) then + fullname = prefixes.locate(str) + end + return fullname +end + prefixes.locate = function(str) local fullname = resolvers.find_given_file(str) or "" return resolvers.clean_path((fullname ~= "" and fullname) or str) @@ -8683,6 +10542,16 @@ prefixes.full = prefixes.locate prefixes.file = prefixes.filename prefixes.path = prefixes.pathname +function resolvers.allprefixes(separator) + local all = table.sortedkeys(prefixes) + if separator then + for i=1,#all do + all[i] = all[i] .. 
":" + end + end + return all +end + local function _resolve_(method,target) if prefixes[method] then return prefixes[method](target) @@ -8693,7 +10562,8 @@ end local function resolve(str) if type(str) == "table" then - for k, v in pairs(str) do -- ipairs + for k=1,#str do + local v = str[k] str[k] = resolve(v) or v end elseif str and str ~= "" then @@ -8706,7 +10576,7 @@ resolvers.resolve = resolve if os.uname then - for k, v in pairs(os.uname()) do + for k, v in next, os.uname() do if not prefixes[k] then prefixes[k] = function() return v end end @@ -8721,7 +10591,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-inp'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8742,7 +10612,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-out'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8758,7 +10628,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -8769,8 +10639,6 @@ local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) --[[ldx--

Once we found ourselves defining similar cache constructs @@ -8834,7 +10702,7 @@ end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] - return storage and not table.is_empty(storage) and storage.cache_version == container.version + return storage and storage.cache_version == container.version else return false end @@ -8886,16 +10754,15 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, lower, gsub = string.format, string.lower, string.gsub +local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v trackers.enable("resolvers.verbose") end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -- since we want to use the cache instead of the tree, we will now -- reimplement the saver. @@ -8939,19 +10806,20 @@ resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) - if table.is_empty(mountpaths) and usecache then + if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = { caches.setpath("mount") } end - if not table.is_empty(mountpaths) then + if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) - for k, root in pairs(mountpaths) do + for k=1,#mountpaths do + local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then - if line:find("^[%%#%-]") then -- or %W + if find(line,"^[%%#%-]") then -- or %W -- skip - elseif line:find("^zip://") then + elseif find(line,"^zip://") then if trace_locating then logs.report("fileio","mounting %s",line) end @@ -8996,11 +10864,13 @@ function statistics.check_fmt_status(texname) local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") - if luv.enginebanner and luv.enginebanner ~= enginebanner then - return "engine mismatch" + local luvbanner = luv.enginebanner or "?" + if luvbanner ~= enginebanner then + return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner) end - if luv.sourcehash and luv.sourcehash ~= sourcehash then - return "source mismatch" + local luvhash = luv.sourcehash or "?" 
+ if luvhash ~= sourcehash then + return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash) end else return "invalid status file" @@ -9019,18 +10889,22 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-zip'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local format, find = string.format, string.find +local format, find, match = string.format, string.find, string.match +local unpack = unpack or table.unpack -local trace_locating, trace_verbose = false, false +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -trackers.register("resolvers.verbose", function(v) trace_verbose = v end) -trackers.register("resolvers.locating", function(v) trace_locating = v trace_verbose = v end) +-- zip:///oeps.zip?name=bla/bla.tex +-- zip:///oeps.zip?tree=tex/texmf-local +-- zip:///texmf.zip?tree=/tex/texmf +-- zip:///texmf.zip?tree=/tex/texmf-local +-- zip:///texmf-mine.zip?tree=/tex/texmf-projects zip = zip or { } zip.archives = zip.archives or { } @@ -9041,9 +10915,6 @@ local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, local archives = zip.archives --- zip:///oeps.zip?name=bla/bla.tex --- zip:///oeps.zip?tree=tex/texmf-local - local function validzip(str) -- todo: use url splitter if not find(str,"^zip://") then return "zip:///" .. str @@ -9073,26 +10944,22 @@ function zip.closearchive(name) end end --- zip:///texmf.zip?tree=/tex/texmf --- zip:///texmf.zip?tree=/tex/texmf-local --- zip:///texmf-mine.zip?tree=/tex/texmf-projects - function locators.zip(specification) -- where is this used? startup zips (untested) specification = resolvers.splitmethod(specification) local zipfile = specification.path local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree if trace_locating then if zfile then - logs.report("fileio",'! zip locator, found: %s',specification.original) + logs.report("fileio","zip locator, archive '%s' found",specification.original) else - logs.report("fileio",'? zip locator, not found: %s',specification.original) + logs.report("fileio","zip locator, archive '%s' not found",specification.original) end end end function hashers.zip(tag,name) - if trace_verbose then - logs.report("fileio","loading zip file %s as %s",name,tag) + if trace_locating then + logs.report("fileio","loading zip file '%s' as '%s'",name,tag) end resolvers.usezipfile(format("%s?tree=%s",tag,name)) end @@ -9117,23 +10984,25 @@ function finders.zip(specification,filetype) local zfile = zip.openarchive(specification.path) if zfile then if trace_locating then - logs.report("fileio",'! zip finder, path: %s',specification.path) + logs.report("fileio","zip finder, archive '%s' found",specification.path) end local dfile = zfile:open(q.name) if dfile then dfile = zfile:close() if trace_locating then - logs.report("fileio",'+ zip finder, name: %s',q.name) + logs.report("fileio","zip finder, file '%s' found",q.name) end return specification.original + elseif trace_locating then + logs.report("fileio","zip finder, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'? 
zip finder, path %s',specification.path) + logs.report("fileio","zip finder, unknown archive '%s'",specification.path) end end end if trace_locating then - logs.report("fileio",'- zip finder, name: %s',filename) + logs.report("fileio","zip finder, '%s' not found",filename) end return unpack(finders.notfound) end @@ -9146,20 +11015,25 @@ function openers.zip(specification) local zfile = zip.openarchive(zipspecification.path) if zfile then if trace_locating then - logs.report("fileio",'+ zip starter, path: %s',zipspecification.path) + logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path) end local dfile = zfile:open(q.name) if dfile then logs.show_open(specification) + if trace_locating then + logs.report("fileio","zip opener, file '%s' found",q.name) + end return openers.text_opener(specification,dfile,'zip') + elseif trace_locating then + logs.report("fileio","zip opener, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'- zip starter, path %s',zipspecification.path) + logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path) end end end if trace_locating then - logs.report("fileio",'- zip opener, name: %s',filename) + logs.report("fileio","zip opener, '%s' not found",filename) end return unpack(openers.notfound) end @@ -9172,25 +11046,27 @@ function loaders.zip(specification) local zfile = zip.openarchive(specification.path) if zfile then if trace_locating then - logs.report("fileio",'+ zip starter, path: %s',specification.path) + logs.report("fileio","zip loader, archive '%s' opened",specification.path) end local dfile = zfile:open(q.name) if dfile then logs.show_load(filename) if trace_locating then - logs.report("fileio",'+ zip loader, name: %s',filename) + logs.report("fileio","zip loader, file '%s' loaded",filename) end local s = dfile:read("*all") dfile:close() return true, s, #s + elseif trace_locating then + logs.report("fileio","zip loader, file '%s' not found",q.name) end elseif trace_locating then - logs.report("fileio",'- zip starter, path: %s',specification.path) + logs.report("fileio","zip loader, unknown archive '%s'",specification.path) end end end if trace_locating then - logs.report("fileio",'- zip loader, name: %s',filename) + logs.report("fileio","zip loader, '%s' not found",filename) end return unpack(openers.notfound) end @@ -9200,21 +11076,15 @@ end function resolvers.usezipfile(zipname) zipname = validzip(zipname) - if trace_locating then - logs.report("fileio",'! zip use, file: %s',zipname) - end local specification = resolvers.splitmethod(zipname) local zipfile = specification.path if zipfile and not zip.registeredfiles[zipname] then local tree = url.query(specification.query).tree or "" - if trace_locating then - logs.report("fileio",'! zip register, file: %s',zipname) - end local z = zip.openarchive(zipfile) if z then local instance = resolvers.instance if trace_locating then - logs.report("fileio","= zipfile, registering: %s",zipname) + logs.report("fileio","zip registering, registering archive '%s'",zipname) end statistics.starttiming(instance) resolvers.prepend_hash('zip',zipname,zipfile) @@ -9223,10 +11093,10 @@ function resolvers.usezipfile(zipname) instance.files[zipname] = resolvers.register_zip_file(z,tree or "") statistics.stoptiming(instance) elseif trace_locating then - logs.report("fileio","? zipfile, unknown: %s",zipname) + logs.report("fileio","zip registering, unknown archive '%s'",zipname) end elseif trace_locating then - logs.report("fileio",'! 
zip register, no file: %s',zipname) + logs.report("fileio","zip registering, '%s' not found",zipname) end end @@ -9238,11 +11108,11 @@ function resolvers.register_zip_file(z,tree) filter = format("^%s/(.+)/(.-)$",tree) end if trace_locating then - logs.report("fileio",'= zip filter: %s',filter) + logs.report("fileio","zip registering, using filter '%s'",filter) end local register, n = resolvers.register_file, 0 for i in z:files() do - local path, name = i.filename:match(filter) + local path, name = match(i.filename,filter) if path then if name and name ~= '' then register(files, name, path) @@ -9255,7 +11125,7 @@ function resolvers.register_zip_file(z,tree) n = n + 1 end end - logs.report("fileio",'= zip entries: %s',n) + logs.report("fileio","zip registering, %s files registered",n) return files end @@ -9266,12 +11136,14 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-crl'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local gsub = string.gsub + curl = curl or { } curl.cached = { } @@ -9280,9 +11152,9 @@ curl.cachepath = caches.definepath("curl") local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders function curl.fetch(protocol, name) - local cachename = curl.cachepath() .. "/" .. name:gsub("[^%a%d%.]+","-") --- cachename = cachename:gsub("[\\/]", io.fileseparator) - cachename = cachename:gsub("[\\]", "/") -- cleanup + local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-") +-- cachename = gsub(cachename,"[\\/]", io.fileseparator) + cachename = gsub(cachename,"[\\]", "/") -- cleanup if not curl.cached[name] then if not io.exists(cachename) then curl.cached[name] = cachename @@ -9324,6 +11196,164 @@ curl.install('https') curl.install('ftp') +end -- of closure + +do -- create closure to overcome 200 locals limit + +if not modules then modules = { } end modules ['data-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- some loading stuff ... we might move this one to slot 2 depending +-- on the developments (the loaders must not trigger kpse); we could +-- of course use a more extensive lib path spec + +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) + +local gsub, insert = string.gsub, table.insert +local unpack = unpack or table.unpack + +local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs' +local clibformats = { 'lib' } + +local _path_, libpaths, _cpath_, clibpaths + +function package.libpaths() + if not _path_ or package.path ~= _path_ then + _path_ = package.path + libpaths = file.split_path(_path_,";") + end + return libpaths +end + +function package.clibpaths() + if not _cpath_ or package.cpath ~= _cpath_ then + _cpath_ = package.cpath + clibpaths = file.split_path(_cpath_,";") + end + return clibpaths +end + +local function thepath(...) + local t = { ... } t[#t+1] = "?.lua" + local path = file.join(unpack(t)) + if trace_locating then + logs.report("fileio","! 
appending '%s' to 'package.path'",path) + end + return path +end + +local p_libpaths, a_libpaths = { }, { } + +function package.append_libpath(...) + insert(a_libpath,thepath(...)) +end + +function package.prepend_libpath(...) + insert(p_libpaths,1,thepath(...)) +end + +-- beware, we need to return a loadfile result ! + +local function loaded(libpaths,name,simple) + for i=1,#libpaths do -- package.path, might become option + local libpath = libpaths[i] + local resolved = gsub(libpath,"%?",simple) + if trace_locating then -- more detail + logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved) + end + return loadfile(resolved) + end + end +end + + +package.loaders[2] = function(name) -- was [#package.loaders+1] + if trace_locating then -- mode detail + logs.report("fileio","! locating '%s'",name) + end + for i=1,#libformats do + local format = libformats[i] + local resolved = resolvers.find_file(name,format) or "" + if trace_locating then -- mode detail + logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format) + end + if resolved ~= "" then + if trace_locating then + logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved) + end + return loadfile(resolved) + end + end + -- libpaths + local libpaths, clibpaths = package.libpaths(), package.clibpaths() + local simple = gsub(name,"%.lua$","") + local simple = gsub(simple,"%.","/") + local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple) + if resolved then + return resolved + end + -- + local libname = file.addsuffix(simple,os.libsuffix) + for i=1,#clibformats do + -- better have a dedicated loop + local format = clibformats[i] + local paths = resolvers.expanded_path_list_from_var(format) + for p=1,#paths do + local path = paths[p] + local resolved = file.join(path,libname) + if trace_locating then -- mode detail + logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved) + end + return package.loadlib(resolved,name) + end + end + end + for i=1,#clibpaths do -- package.path, might become option + local libpath = clibpaths[i] + local resolved = gsub(libpath,"?",simple) + if trace_locating then -- more detail + logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath) + end + if resolvers.isreadable.file(resolved) then + if trace_locating then + logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved) + end + return package.loadlib(resolved,name) + end + end + -- just in case the distribution is messed up + if trace_loading then -- more detail + logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name) + end + local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or "" + if resolved ~= "" then + if trace_locating then + logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved) + end + return loadfile(resolved) + end + if trace_locating then + logs.report("fileio",'? unable to locate lib: %s',name) + end +-- return "unable to locate " .. 
name +end + +resolvers.loadlualib = require + + end -- of closure do -- create closure to overcome 200 locals limit @@ -9437,7 +11467,7 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-aux'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -9445,47 +11475,47 @@ if not modules then modules = { } end modules ['data-aux'] = { local find = string.find -local trace_verbose = false trackers.register("resolvers.verbose", function(v) trace_verbose = v end) +local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") local oldscript = resolvers.clean_path(oldname) - if trace_verbose then + if trace_locating then logs.report("fileio","to be replaced old script %s", oldscript) end local newscripts = resolvers.find_files(newname) or { } if #newscripts == 0 then - if trace_verbose then + if trace_locating then logs.report("fileio","unable to locate new script") end else for i=1,#newscripts do local newscript = resolvers.clean_path(newscripts[i]) - if trace_verbose then + if trace_locating then logs.report("fileio","checking new script %s", newscript) end if oldscript == newscript then - if trace_verbose then + if trace_locating then logs.report("fileio","old and new script are the same") end elseif not find(newscript,scriptpath) then - if trace_verbose then + if trace_locating then logs.report("fileio","new script should come from %s",scriptpath) end elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then - if trace_verbose then + if trace_locating then logs.report("fileio","invalid new script name") end else local newdata = io.loaddata(newscript) if newdata then - if trace_verbose then + if trace_locating then logs.report("fileio","old script content replaced by new content") end io.savedata(oldscript,newdata) break - elseif trace_verbose then + elseif trace_locating then logs.report("fileio","unable to load new script") end end @@ -9500,25 +11530,28 @@ do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tmf'] = { version = 1.001, - comment = "companion to luat-lib.tex", + comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local find, gsub, match = string.find, string.gsub, string.match +local getenv, setenv = os.getenv, os.setenv + -- loads *.tmf files in minimal tree roots (to be optimized and documented) function resolvers.check_environment(tree) logs.simpleline() - os.setenv('TMP', os.getenv('TMP') or os.getenv('TEMP') or os.getenv('TMPDIR') or os.getenv('HOME')) - os.setenv('TEXOS', os.getenv('TEXOS') or ("texmf-" .. os.currentplatform())) - os.setenv('TEXPATH', (tree or "tex"):gsub("\/+$",'')) - os.setenv('TEXMFOS', os.getenv('TEXPATH') .. "/" .. os.getenv('TEXOS')) + setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME')) + setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. 
os.platform)) + setenv('TEXPATH', gsub(tree or "tex","\/+$",'')) + setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS')) logs.simpleline() - logs.simple("preset : TEXPATH => %s", os.getenv('TEXPATH')) - logs.simple("preset : TEXOS => %s", os.getenv('TEXOS')) - logs.simple("preset : TEXMFOS => %s", os.getenv('TEXMFOS')) - logs.simple("preset : TMP => %s", os.getenv('TMP')) + logs.simple("preset : TEXPATH => %s", getenv('TEXPATH')) + logs.simple("preset : TEXOS => %s", getenv('TEXOS')) + logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS')) + logs.simple("preset : TMP => %s", getenv('TMP')) logs.simple('') end @@ -9526,27 +11559,27 @@ function resolvers.load_environment(name) -- todo: key=value as well as lua local f = io.open(name) if f then for line in f:lines() do - if line:find("^[%%%#]") then + if find(line,"^[%%%#]") then -- skip comment else - local key, how, value = line:match("^(.-)%s*([<=>%?]+)%s*(.*)%s*$") + local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$") if how then - value = value:gsub("%%(.-)%%", function(v) return os.getenv(v) or "" end) + value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end) if how == "=" or how == "<<" then - os.setenv(key,value) + setenv(key,value) elseif how == "?" or how == "??" then - os.setenv(key,os.getenv(key) or value) + setenv(key,getenv(key) or value) elseif how == "<" or how == "+=" then - if os.getenv(key) then - os.setenv(key,os.getenv(key) .. io.fileseparator .. value) + if getenv(key) then + setenv(key,getenv(key) .. io.fileseparator .. value) else - os.setenv(key,value) + setenv(key,value) end elseif how == ">" or how == "=+" then - if os.getenv(key) then - os.setenv(key,value .. io.pathseparator .. os.getenv(key)) + if getenv(key) then + setenv(key,value .. io.pathseparator .. 
getenv(key)) else - os.setenv(key,value) + setenv(key,value) end end end @@ -9585,6 +11618,9 @@ if not modules then modules = { } end modules ['luat-sta'] = { -- this code is used in the updater +local gmatch, match = string.gmatch, string.match +local type = type + states = states or { } states.data = states.data or { } states.hash = states.hash or { } @@ -9613,13 +11649,17 @@ function states.set_by_tag(tag,key,value,default,persistent) if d then if type(d) == "table" then local dkey, hkey = key, key - local pre, post = key:match("(.+)%.([^%.]+)$") + local pre, post = match(key,"(.+)%.([^%.]+)$") if pre and post then - for k in pre:gmatch("[^%.]+") do + for k in gmatch(pre,"[^%.]+") do local dk = d[k] if not dk then dk = { } d[k] = dk + elseif type(dk) == "string" then + -- invalid table, unable to upgrade structure + -- hope for the best or delete the state file + break end d = dk end @@ -9647,7 +11687,7 @@ function states.get_by_tag(tag,key,default) else local d = states.data[tag] if d then - for k in key:gmatch("[^%.]+") do + for k in gmatch(key,"[^%.]+") do local dk = d[k] if dk then d = dk @@ -9782,6 +11822,7 @@ own.libs = { -- todo: check which ones are really needed 'l-os.lua', 'l-file.lua', 'l-md5.lua', + 'l-url.lua', 'l-dir.lua', 'l-boolean.lua', 'l-math.lua', @@ -9790,11 +11831,13 @@ own.libs = { -- todo: check which ones are really needed 'l-utils.lua', 'l-aux.lua', -- 'l-xml.lua', + 'trac-tra.lua', 'lxml-tab.lua', - 'lxml-pth.lua', - 'lxml-ent.lua', + 'lxml-lpt.lua', +-- 'lxml-ent.lua', 'lxml-mis.lua', - 'trac-tra.lua', + 'lxml-aux.lua', + 'lxml-xml.lua', 'luat-env.lua', 'trac-inf.lua', 'trac-log.lua', @@ -9809,7 +11852,7 @@ own.libs = { -- todo: check which ones are really needed -- 'data-bin.lua', 'data-zip.lua', 'data-crl.lua', --- 'data-lua.lua', + 'data-lua.lua', 'data-kps.lua', -- so that we can replace kpsewhich 'data-aux.lua', -- updater 'data-tmf.lua', -- tree files @@ -9827,7 +11870,8 @@ end -- End of hack. -own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua' +own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua' + own.path = string.match(own.name,"^(.+)[\\/].-$") or "." own.list = { '.' } @@ -9865,18 +11909,25 @@ if not resolvers then os.exit() end -logs.setprogram('MTXrun',"TDS Runner Tool 1.22",environment.arguments["verbose"] or false) +logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false) local instance = resolvers.reset() +local trackspec = environment.argument("trackers") or environment.argument("track") + +if trackspec then + trackers.enable(trackspec) +end + runners = runners or { } -- global messages = messages or { } messages.help = [[ ---script run an mtx script (--noquotes) ---execute run a script or program (--noquotes) +--script run an mtx script (lua prefered method) (--noquotes), no script gives list +--execute run a script or program (texmfstart method) (--noquotes) --resolve resolve prefixed arguments --ctxlua run internally (using preloaded libs) +--internal run script using built in libraries (same as --ctxlua) --locate locate given filename --autotree use texmf tree cf. 
env 'texmfstart_tree' or 'texmfstarttree' @@ -9893,16 +11944,20 @@ messages.help = [[ --unix create unix (linux) stubs --verbose give a bit more info +--trackers=list enable given trackers --engine=str target engine --progname=str format or backend --edit launch editor with found file --launch (--all) launch files like manuals, assumes os support ---intern run script using built in libraries +--timedrun run a script an time its run +--autogenerate regenerate databases if needed (handy when used to run context in an editor) + +--usekpse use kpse as fallback (when no mkiv and cache installed, often slower) +--forcekpse force using kpse (handy when no mkiv and cache installed but less functionality) ---usekpse use kpse as fallback (when no mkiv and cache installed, often slower) ---forcekpse force using kpse (handy when no mkiv and cache installed but less functionality) +--prefixes show supported prefixes ]] runners.applications = { @@ -9918,20 +11973,17 @@ runners.suffixes = { } runners.registered = { - texexec = { 'texexec.rb', true }, -- context mkii runner (only tool not to be luafied) + texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied) texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it) texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied - -- texwork = { \texwork.pl', false }, -- perltk based editing environment, only used at pragma - + -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma makempy = { 'makempy.pl', true }, mptopdf = { 'mptopdf.pl', true }, pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced) - -- examplex = { 'examplex.rb', false }, concheck = { 'concheck.rb', false }, - runtools = { 'runtools.rb', true }, textools = { 'textools.rb', true }, tmftools = { 'tmftools.rb', true }, @@ -9943,7 +11995,6 @@ runners.registered = { xmltools = { 'xmltools.rb', true }, -- luatools = { 'luatools.lua', true }, mtxtools = { 'mtxtools.rb', true }, - pdftrimwhite = { 'pdftrimwhite.pl', false } } @@ -9952,6 +12003,13 @@ runners.launchers = { unix = { } } +-- like runners.libpath("framework"): looks on script's subpath + +function runners.libpath(...) + package.prepend_libpath(file.dirname(environment.ownscript),...) + package.prepend_libpath(file.dirname(environment.ownname) ,...) 
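-- The data-lua.lua closure above replaces Lua's second package loader so that
-- require() first asks the resolvers (find_file over the TDS formats) and only
-- then falls back to "?" templates from package.path / package.cpath, while
-- runners.libpath prepends the directory of the running script. Below is a
-- minimal, illustrative sketch of the "?" substitution that the loaded()
-- helper performs; resolve_template is a made-up name and io.open stands in
-- for resolvers.isreadable.file.

local function resolve_template(templates,name)
    local simple = name:gsub("%.lua$",""):gsub("%.","/") -- "foo.bar" -> "foo/bar"
    for i=1,#templates do
        local candidate = templates[i]:gsub("%?",simple) -- fill in the "?" slot
        local f = io.open(candidate,"r")                 -- readability check only
        if f then
            f:close()
            return loadfile(candidate)                   -- a loader must return a loadfile result
        end
    end
end

-- usage sketch, with templates as they look after splitting package.path on ";":
local chunk = resolve_template({ "./?.lua", "scripts/context/lua/?.lua" }, "mtx-fonts")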
+end + function runners.prepare() local checkname = environment.argument("ifchanged") if checkname and checkname ~= "" then @@ -9996,7 +12054,7 @@ function runners.prepare() return "run" end -function runners.execute_script(fullname,internal) +function runners.execute_script(fullname,internal,nosplit) local noquote = environment.argument("noquotes") if fullname and fullname ~= "" then local state = runners.prepare() @@ -10036,17 +12094,20 @@ function runners.execute_script(fullname,internal) end end if result and result ~= "" then - local before, after = environment.split_arguments(fullname) -- already done - environment.arguments_before, environment.arguments_after = before, after + if not no_split then + local before, after = environment.split_arguments(fullname) -- already done + environment.arguments_before, environment.arguments_after = before, after + end if internal then - arg = { } for _,v in pairs(after) do arg[#arg+1] = v end + arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end + environment.ownscript = result dofile(result) else local binary = runners.applications[file.extname(result)] if binary and binary ~= "" then result = binary .. " " .. result end - local command = result .. " " .. environment.reconstruct_commandline(after,noquote) + local command = result .. " " .. environment.reconstruct_commandline(environment.arguments_after,noquote) if logs.verbose then logs.simpleline() logs.simple("executing: %s",command) @@ -10054,8 +12115,24 @@ function runners.execute_script(fullname,internal) logs.simpleline() io.flush() end - local code = os.exec(command) -- maybe spawn - return code == 0 + -- no os.exec because otherwise we get the wrong return value + local code = os.execute(command) -- maybe spawn + if code == 0 then + return true + else + if binary then + binary = file.addsuffix(binary,os.binsuffix) + for p in string.gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do + if lfs.isfile(file.join(p,binary)) then + return false + end + end + logs.simpleline() + logs.simple("This script needs '%s' which seems not to be installed.",binary) + logs.simpleline() + end + return false + end end end end @@ -10088,7 +12165,7 @@ function runners.execute_program(fullname) return false end --- the --usekpse flag will fallback on kpse +-- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs) local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010' local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010' @@ -10143,7 +12220,7 @@ function runners.locate_file(filename) end function runners.locate_platform() - runners.report_location(os.currentplatform()) + runners.report_location(os.platform) end function runners.report_location(result) @@ -10176,7 +12253,8 @@ end function runners.save_script_session(filename, list) local t = { } - for _, key in ipairs(list) do + for i=1,#list do + local key = list[i] t[key] = environment.arguments[key] end io.savedata(filename,table.serialize(t,true)) @@ -10265,20 +12343,22 @@ function runners.find_mtx_script(filename) if fullname and fullname ~= "" then return fullname end + -- mtx- prefix checking + local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-" -- context namespace, mtx- - fullname = "mtx-" .. filename + fullname = mtxprefix .. 
filename fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname end -- context namespace, mtx-s - fullname = "mtx-" .. basename .. "s" .. "." .. suffix + fullname = mtxprefix .. basename .. "s" .. "." .. suffix fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname end -- context namespace, mtx- - fullname = "mtx-" .. basename:gsub("s$","") .. "." .. suffix + fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix fullname = found(fullname) or resolvers.find_file(fullname) if fullname and fullname ~= "" then return fullname @@ -10288,9 +12368,17 @@ function runners.find_mtx_script(filename) return fullname end -function runners.execute_ctx_script(filename,arguments) +function runners.execute_ctx_script(filename) + local arguments = environment.arguments_after local fullname = runners.find_mtx_script(filename) or "" - -- retyr after generate but only if --autogenerate + if file.extname(fullname) == "cld" then + -- handy in editors where we force --autopdf + logs.simple("running cld script: %s",filename) + table.insert(arguments,1,fullname) + table.insert(arguments,"--autopdf") + fullname = runners.find_mtx_script("context") or "" + end + -- retry after generate but only if --autogenerate if fullname == "" and environment.argument("autogenerate") then -- might become the default instance.renewcache = true logs.setverbose(true) @@ -10319,32 +12407,51 @@ function runners.execute_ctx_script(filename,arguments) if logs.verbose then logs.simple("using script: %s\n",fullname) end + environment.ownscript = fullname dofile(fullname) local savename = environment.arguments['save'] - if savename and runners.save_list and not table.is_empty(runners.save_list or { }) then - if type(savename) ~= "string" then savename = file.basename(fullname) end - savename = file.replacesuffix(savename,"cfg") - runners.save_script_session(savename, runners.save_list) + if savename then + local save_list = runners.save_list + if save_list and next(save_list) then + if type(savename) ~= "string" then savename = file.basename(fullname) end + savename = file.replacesuffix(savename,"cfg") + runners.save_script_session(savename,save_list) + end end return true end else - logs.setverbose(true) - if filename == "" then - logs.simple("unknown script, no name given") + -- logs.setverbose(true) + if filename == "" or filename == "help" then local context = resolvers.find_file("mtx-context.lua") + logs.setverbose(true) if context ~= "" then local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed local valid = { } - for _, scriptname in ipairs(result) do - scriptname = string.match(scriptname,".*mtx%-([^%-]-)%.lua") - if scriptname then - valid[#valid+1] = scriptname + table.sort(result) + for i=1,#result do + local scriptname = result[i] + local scriptbase = string.match(scriptname,".*mtx%-([^%-]-)%.lua") + if scriptbase then + local data = io.loaddata(scriptname) + local banner, version = string.match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)") + if banner then + valid[#valid+1] = { scriptbase, version, banner } + end end end if #valid > 0 then - logs.simple("known scripts: %s",table.concat(valid,", ")) + logs.reportbanner() + logs.reportline() + logs.simple("no script name given, known scripts:") + logs.simple() + for k=1,#valid do + local v = valid[k] + logs.simple("%-12s %4s %s",v[1],v[2],v[3]) + end end + else + logs.simple("no script name given") end 
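-- The script dispatcher above now avoids doubling the prefix: a request such as
-- "mtx-fonts" is no longer expanded to "mtx-mtx-fonts". A simplified sketch of
-- the candidate names that find_mtx_script walks through; build_candidates is a
-- made-up helper, and the real code interleaves found()/resolvers.find_file
-- lookups and suffix handling instead of collecting a list first.

local function build_candidates(filename)
    local suffix   = filename:match("%.([^%.]+)$") or "lua"
    local basename = filename:gsub("%.[^%.]+$","")
    local prefix   = filename:find("^mtx%-") and "" or "mtx-"      -- avoid mtx-mtx-...
    return {
        filename,                                                  -- as given
        prefix .. filename,                                        -- context namespace: mtx-<name>
        prefix .. basename .. "s." .. suffix,                      -- plural variant
        prefix .. basename:gsub("s$","") .. "." .. suffix,         -- singular variant
    }
end

for _, candidate in ipairs(build_candidates("fonts")) do
    print(candidate) -- fonts, mtx-fonts, mtx-fontss.lua, mtx-font.lua
end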
else filename = file.addsuffix(filename,"lua") @@ -10358,6 +12465,12 @@ function runners.execute_ctx_script(filename,arguments) end end +function runners.prefixes() + logs.reportbanner() + logs.reportline() + logs.simple(table.concat(resolvers.allprefixes(true)," ")) +end + function runners.timedrun(filename) -- just for me if filename and filename ~= "" then runners.timed(function() os.execute(filename) end) @@ -10385,7 +12498,9 @@ instance.lsrmode = environment.argument("lsr") or false -- maybe the unset has to go to this level -if environment.argument("usekpse") or environment.argument("forcekpse") then +local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))] + +if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then os.setenv("engine","") os.setenv("progname","") @@ -10420,7 +12535,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") then return (kpse_initialized():show_path(name)) or "" end - elseif environment.argument("usekpse") then + elseif environment.argument("usekpse") or is_mkii_stub then resolvers.load() @@ -10449,7 +12564,6 @@ else end - if environment.argument("selfmerge") then -- embed used libraries utils.merger.selfmerge(own.name,own.libs,own.list) @@ -10462,9 +12576,14 @@ elseif environment.argument("selfupdate") then elseif environment.argument("ctxlua") or environment.argument("internal") then -- run a script by loading it (using libs) ok = runners.execute_script(filename,true) -elseif environment.argument("script") or environment.argument("s") then +elseif environment.argument("script") or environment.argument("scripts") then -- run a script by loading it (using libs), pass args - ok = runners.execute_ctx_script(filename,after) + if is_mkii_stub then + -- execute mkii script + ok = runners.execute_script(filename,false,true) + else + ok = runners.execute_ctx_script(filename) + end elseif environment.argument("execute") then -- execute script ok = runners.execute_script(filename) @@ -10491,6 +12610,8 @@ elseif environment.argument("locate") then elseif environment.argument("platform")then -- locate platform runners.locate_platform() +elseif environment.argument("prefixes") then + runners.prefixes() elseif environment.argument("timedrun") then -- locate platform runners.timedrun(filename) @@ -10499,8 +12620,14 @@ elseif environment.argument("help") or filename=='help' or filename == "" then -- execute script elseif filename:find("^bin:") then ok = runners.execute_program(filename) +elseif is_mkii_stub then + -- execute mkii script + ok = runners.execute_script(filename,false,true) else - ok = runners.execute_script(filename) + ok = runners.execute_ctx_script(filename) + if not ok then + ok = runners.execute_script(filename) + end end if os.platform == "unix" then diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mtxtools b/Master/texmf-dist/scripts/context/stubs/unix/mtxtools deleted file mode 100755 index 3803c1c6f4e..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/mtxtools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute mtxtools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mtxworks b/Master/texmf-dist/scripts/context/stubs/unix/mtxworks deleted file mode 100644 index ef8f230c3a3..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/mtxworks +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --script texworks --start diff --git a/Master/texmf-dist/scripts/context/stubs/unix/pdftools 
b/Master/texmf-dist/scripts/context/stubs/unix/pdftools deleted file mode 100755 index da7bd64cf2a..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/pdftools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute pdftools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/pstopdf b/Master/texmf-dist/scripts/context/stubs/unix/pstopdf deleted file mode 100755 index 059812cceac..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/pstopdf +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute pstopdf.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/rlxtools b/Master/texmf-dist/scripts/context/stubs/unix/rlxtools deleted file mode 100755 index d01987b3c1f..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/rlxtools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute rlxtools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/runtools b/Master/texmf-dist/scripts/context/stubs/unix/runtools deleted file mode 100755 index e21c1a24405..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/runtools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute runtools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/texexec b/Master/texmf-dist/scripts/context/stubs/unix/texexec index 083e500c69c..cd5900ff84c 100755 --- a/Master/texmf-dist/scripts/context/stubs/unix/texexec +++ b/Master/texmf-dist/scripts/context/stubs/unix/texexec @@ -1,2 +1,2 @@ #!/bin/sh -mtxrun --usekpse --execute texexec.rb "$@" +mtxrun --usekpse --execute texexec "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/texfont b/Master/texmf-dist/scripts/context/stubs/unix/texfont deleted file mode 100755 index bc811a640af..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/texfont +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute texfont.pl "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/textools b/Master/texmf-dist/scripts/context/stubs/unix/textools deleted file mode 100755 index 76087ca5729..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/textools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute textools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/texutil b/Master/texmf-dist/scripts/context/stubs/unix/texutil deleted file mode 100755 index f5d9b6f1d81..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/texutil +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute texutil.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/tmftools b/Master/texmf-dist/scripts/context/stubs/unix/tmftools deleted file mode 100755 index 48d32f0fd3a..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/tmftools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute tmftools.rb "$@" diff --git a/Master/texmf-dist/scripts/context/stubs/unix/xmltools b/Master/texmf-dist/scripts/context/stubs/unix/xmltools deleted file mode 100755 index a673d1e7a03..00000000000 --- a/Master/texmf-dist/scripts/context/stubs/unix/xmltools +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -mtxrun --usekpse --execute xmltools.rb "$@" diff --git a/Master/texmf-dist/tex/context/base/anch-bar.mkiv b/Master/texmf-dist/tex/context/base/anch-bar.mkiv index d08573c0f60..1013e76096f 100644 --- a/Master/texmf-dist/tex/context/base/anch-bar.mkiv +++ b/Master/texmf-dist/tex/context/base/anch-bar.mkiv @@ -56,7 +56,7 @@ \newcount\currentsidebar 
\newdimen\sidebardistance -\def\setupsidebars +\unexpanded\def\setupsidebars {\dodoubleargument\dosetupsidebars} \def\dosetupsidebars[#1][#2]% @@ -77,7 +77,7 @@ \c!rulecolor=red, \c!distance=.5\bodyfontsize] -\def\definesidebar +\unexpanded\def\definesidebar {\dodoubleempty\dodefinesidebar} \def\dodefinesidebar[#1][#2]% @@ -87,7 +87,7 @@ \getparameters [\??br#1][#2]} -\def\startsidebar +\unexpanded\def\startsidebar {\dosingleempty\dostartsidebar} \def\dostartsidebar[#1]% @@ -115,34 +115,19 @@ % \def\dostopsidebar#1% % {\removelastspace\tpos{side:#1}\carryoverpar\egroup} -\def\stopsidebar +\unexpanded\def\stopsidebar {\removelastspace\tpos{side:\the\currentsidebar}\carryoverpar\egroup} \startMPpositionmethod{mpos:bar} - \startMPpositiongraphic{mpos:bar}{linecolor,linewidth,distance}% - StartPage ; - path p ; p := - if \MPp\MPbself=\MPp\MPeself : - (xpart ulcorner Field[Text][Text],\MPy\MPbself+\MPh\MPbself) -- - (xpart llcorner Field[Text][Text],\MPy\MPeself-\MPd\MPeself) ; - elseif RealPageNumber=\MPp\MPbself : - (xpart ulcorner Field[Text][Text],\MPy\MPbself+\MPh\MPbself) -- - (llcorner Field[Text][Text]) ; - elseif RealPageNumber=\MPp\MPeself : - (ulcorner Field[Text][Text]) -- - (xpart llcorner Field[Text][Text],\MPy\MPeself-\MPd\MPeself) ; - else : - (ulcorner Field[Text][Text]) -- - (llcorner Field[Text][Text]) ; - fi ; - p := p shifted (-llcorner Field[Text][Text]-(\MPvar{distance},0)) ; - interim linecap := butt ; - draw p - withpen pencircle scaled \MPvar{linewidth} - withcolor \MPvar{linecolor} ; - StopPage ; - \stopMPpositiongraphic - \MPpositiongraphic{mpos:bar}{}% + \startMPpositiongraphic{mpos:bar}{linecolor,linewidth,distance}% + position_anchor_bar ( + \MPp\MPbself, \MPp\MPeself, + \MPy\MPbself, \MPy\MPeself, + \MPh\MPbself, \MPd\MPeself, + \MPvar{distance}, \MPvar{linewidth}, \MPvar{linecolor} + ) ; + \stopMPpositiongraphic + \MPpositiongraphic{mpos:bar}{}% \stopMPpositionmethod %D We now reimplement the margin rules handler defined in diff --git a/Master/texmf-dist/tex/context/base/anch-pgr.mkiv b/Master/texmf-dist/tex/context/base/anch-pgr.mkiv index c558bb04fa8..248b9ca5e31 100644 --- a/Master/texmf-dist/tex/context/base/anch-pgr.mkiv +++ b/Master/texmf-dist/tex/context/base/anch-pgr.mkiv @@ -76,7 +76,7 @@ \fi \fi\fi} -\def\placepositionanchors % todo : depth pagebox +\unexpanded\def\placepositionanchors % todo : depth pagebox {\ifpositioning \setbox\scratchbox\vbox to \textheight {\simpletopskipcorrection @@ -145,7 +145,7 @@ % obsolete and wrong anyway % -% \long\def\defineMPpositiongraphic#1% +% \long\unexpanded\def\defineMPpositiongraphic#1% % {\long\setvalue{\MPoverlayposprefix#1}} %D The rest of the definitions concerning such overlays may @@ -180,14 +180,14 @@ \vfill}% \fi} -\def\startpositionoverlay#1% +\unexpanded\def\startpositionoverlay#1% {\iftrialtypesetting % we don't want redundant entries in the list \@EA\gobbleuntil\@EA\stoppositionoverlay \else \def\currentpositionoverlay{#1}% \fi} -\def\stoppositionoverlay +\unexpanded\def\stoppositionoverlay {\let\currentpositionoverlay\empty} \def\resetpositionoverlay#1% @@ -741,15 +741,15 @@ lineradius=\textbackgroundparameter\c!radius}}% \stoppositionoverlay} -\def\stoptextbackground +\unexpanded\def\stoptextbackground {\dodostoptextbackground \carryoverpar\egroup} -\def\starttextbackgroundmanual +\unexpanded\def\starttextbackgroundmanual {\dostartattributes{\??td\currenttextbackground}\c!style\c!color\empty \fpos\currentparbackground\ignorespaces} -\def\stoptextbackgroundmanual 
+\unexpanded\def\stoptextbackgroundmanual {\tpos\currentparbackground \dostopattributes} @@ -797,7 +797,7 @@ % \dostartattributes{\??td\currenttextbackground}\c!style\c!color\empty \nowhitespace -\nobreak % new per 23/04/2006 (else potential break when whitespace) + \nobreak % new per 23/04/2006 (else potential break when whitespace) \seteffectivehsize \doinhibitblank % \blank[\v!disable]% new \par} @@ -816,7 +816,7 @@ \nobreak \vskip-\lineheight \nobreak \nowhitespace \egroup -\bgroup \forgeteverypar % NOT REALLY NEEDED, SAVES HASH/MEM + \bgroup \forgeteverypar % NOT REALLY NEEDED, SAVES HASH/MEM \nobreak \noindent \strut \hfill \kern\zeropoint \doassignsomeskip\textbackgroundparameter\c!bottomoffset\to\textbackgroundskip \ifgridsnapping % experimental, pascal (todo: topoffset in same way) @@ -828,7 +828,7 @@ \else \tpos\currentparbackground \fi -\egroup + \egroup \endgraf % new \textbackgroundparameter\c!after} @@ -900,12 +900,13 @@ \else \MPy#1-\MPy#2-\MPy\textanchor+\MPy\textanchor+\MPh\textanchor \fi - \fi}% + \fi + \relax}% \edef\textparwidth {\the\dimexpr \ifcase\scratchcounter % one page - \dimexpr\MPx#2-\MPx#1% + \MPx#2-\MPx#1% \else % two or more pages / maybe also hang \ifx#3\relax @@ -913,7 +914,8 @@ \else \MPw\MPparanchor-\MPl\MPparanchor-\MPr\MPparanchor \fi - \fi}} + \fi + \relax}} \def\mintextparheight{4\lineheight} @@ -935,7 +937,7 @@ \fi \fi} -\def\definetextbackground +\unexpanded\def\definetextbackground {\dodoubleempty\dodefinetextbackground} \def\dodefinetextbackground[#1][#2]% parent and ..parameter @@ -948,13 +950,13 @@ \c!leftoffset,\c!rightoffset,\c!topoffset,\c!bottomoffset]% \getparameters[\??td#1][#2]% \doifvalue{\??td#1\c!state}\v!start\checktextbackgrounds - \unexpanded\setvalue{#1}% + \setuvalue{#1}% {\groupedcommand{\starttextbackground[#1]}{\stoptextbackground}}% \setvalue{\e!start#1}{\starttextbackground[#1]}% \setvalue{\e!stop #1}{\stoptextbackground}% \fi} -\def\setuptextbackground +\unexpanded\def\setuptextbackground {\dodoubleargument\dosetuptextbackground} \def\dosetuptextbackground[#1][#2]% @@ -1029,6 +1031,8 @@ linecolor=blue, linewidth=1pt] +% these might become macros in mp-page + \startuseMPgraphic{mpos:common:ec} path pa ; pair ca ; color lc ; numeric lw ; lw := \MPvar{linewidth} ; @@ -1632,7 +1636,7 @@ \copyposition{e:#1}{e:#2}% \dosetpositionaction{b:#2}{\dopositionaction{b:#1}}} -\def\definepositionframed +\unexpanded\def\definepositionframed {\dodoubleargument\dodefinepositionframed} \def\dodefinepositionframed[#1][#2]% diff --git a/Master/texmf-dist/tex/context/base/anch-pos.lua b/Master/texmf-dist/tex/context/base/anch-pos.lua index b16fac05c9c..8066af4ceba 100644 --- a/Master/texmf-dist/tex/context/base/anch-pos.lua +++ b/Master/texmf-dist/tex/context/base/anch-pos.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['anch-pos'] = { version = 1.001, - comment = "companion to anch-pos.tex", + comment = "companion to anch-pos.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -12,7 +12,9 @@ can we store much more information in but it's also more efficient.

--ldx]]-- -local texprint, concat, format = tex.print, table.concat, string.format +local concat, format = table.concat, string.format +local texprint, ctxcatcodes = tex.print, tex.ctxcatcodes +local lpegmatch = lpeg.match jobpositions = jobpositions or { } jobpositions.collected = jobpositions.collected or { } @@ -50,90 +52,115 @@ function jobpositions.doifelse(name) commands.testcase(jobpositions.collected[name] or ptbs[name]) end -function jobpositions.MPp(id) local jpi = pcol[id] or ptbs[id] texprint((jpi and jpi[1]) or '0' ) end -function jobpositions.MPx(id) local jpi = pcol[id] or ptbs[id] texprint((jpi and jpi[2]) or '0pt') end -function jobpositions.MPy(id) local jpi = pcol[id] or ptbs[id] texprint((jpi and jpi[3]) or '0pt') end -function jobpositions.MPw(id) local jpi = pcol[id] or ptbs[id] texprint((jpi and jpi[4]) or '0pt') end -function jobpositions.MPh(id) local jpi = pcol[id] or ptbs[id] texprint((jpi and jpi[5]) or '0pt') end -function jobpositions.MPd(id) local jpi = pcol[id] or ptbs[id] texprint((jpi and jpi[6]) or '0pt') end - - - function jobpositions.MPx(id) - local jpi = pcol[id] or ptbs[id] - local x = jpi and jpi[2] - if x then - texprint(format('\\the\\dimexpr %s-%s\\relax',x,dx)) - else - texprint('0pt') - end +function jobpositions.MPp(id) local jpi = pcol[id] or ptbs[id] texprint(ctxcatcodes,(jpi and jpi[1]) or '0' ) end +function jobpositions.MPx(id) local jpi = pcol[id] or ptbs[id] texprint(ctxcatcodes,(jpi and jpi[2]) or '0pt') end +function jobpositions.MPy(id) local jpi = pcol[id] or ptbs[id] texprint(ctxcatcodes,(jpi and jpi[3]) or '0pt') end +function jobpositions.MPw(id) local jpi = pcol[id] or ptbs[id] texprint(ctxcatcodes,(jpi and jpi[4]) or '0pt') end +function jobpositions.MPh(id) local jpi = pcol[id] or ptbs[id] texprint(ctxcatcodes,(jpi and jpi[5]) or '0pt') end +function jobpositions.MPd(id) local jpi = pcol[id] or ptbs[id] texprint(ctxcatcodes,(jpi and jpi[6]) or '0pt') end + + +function jobpositions.MPx(id) + local jpi = pcol[id] or ptbs[id] + local x = jpi and jpi[2] + if x then + texprint(ctxcatcodes,format('\\the\\dimexpr%s-%s\\relax',x,dx)) -- no space after dimexpr ! + else + texprint(ctxcatcodes,'0pt') end - function jobpositions.MPy(id) - local jpi = pcol[id] or ptbs[id] - local y = jpi and jpi[3] - if y then - texprint(format('\\the\\dimexpr %s-%s\\relax',y,dy)) - else - texprint('0pt') - end +end + +function jobpositions.MPy(id) + local jpi = pcol[id] or ptbs[id] + local y = jpi and jpi[3] + if y then + texprint(ctxcatcodes,format('\\the\\dimexpr%s-%s\\relax',y,dy)) -- no space after dimexpr ! 
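-- Every write back to TeX in these hunks now goes through tex.print with the
-- ConTeXt catcode table, so the catcode regime no longer depends on the caller,
-- and the \dimexpr strings are emitted without a space after \dimexpr. A minimal
-- sketch of the same pattern; print_offset_x is a made-up name, jpi stands for
-- one collected position tuple {page,x,y,w,h,d} as used above, and the snippet
-- is only meaningful inside a ConTeXt MkIV run where tex.ctxcatcodes exists.

local format = string.format

local function print_offset_x(jpi,dx)
    local x = jpi and jpi[2] -- field 2 of a position tuple is the x coordinate
    if x then
        tex.print(tex.ctxcatcodes,format('\\the\\dimexpr%s-%s\\relax',x,dx))
    else
        tex.print(tex.ctxcatcodes,'0pt')
    end
end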
+ else + texprint(ctxcatcodes,'0pt') end +end -- the following are only for MP so there we can leave out the pt --- can be writes +-- can be writes and no format needed any more function jobpositions.MPxy(id) local jpi = pcol[id] or ptbs[id] if jpi then - texprint(format('(%s-%s,%s-%s)',jpi[2],dx,jpi[3],dy)) + texprint(ctxcatcodes,format('(%s-%s,%s-%s)',jpi[2],dx,jpi[3],dy)) +--~ texprint(ctxcatcodes,'(',jpi[2],'-',dx,',',jpi[3],'-',dy,')') else - texprint('(0,0)') + texprint(ctxcatcodes,'(0,0)') end end + function jobpositions.MPll(id) local jpi = pcol[id] or ptbs[id] if jpi then - texprint(format('(%s-%s,%s-%s-%s)',jpi[2],dx,jpi[3],jpi[6],dy)) + texprint(ctxcatcodes,format('(%s-%s,%s-%s-%s)',jpi[2],dx,jpi[3],jpi[6],dy)) +--~ texprint(ctxcatcodes,'(',jpi[2],'-',dx,',',jpi[3],'-',jpi[6],'-',dy,')') else - texprint('(0,0)') + texprint(ctxcatcodes,'(0,0)') end end + function jobpositions.MPlr(id) local jpi = pcol[id] or ptbs[id] if jpi then - texprint(format('(%s+%s-%s,%s-%s-%s)',jpi[2],jpi[4],dx,jpi[3],jpi[6],dy)) + texprint(ctxcatcodes,format('(%s+%s-%s,%s-%s-%s)',jpi[2],jpi[4],dx,jpi[3],jpi[6],dy)) +--~ texprint(ctxcatcodes,'(',jpi[2],'+',jpi[4],'-',dx,',',jpi[3],'-',jpi[6],'-',dy,')') else - texprint('(0,0)') + texprint(ctxcatcodes,'(0,0)') end end + function jobpositions.MPur(id) local jpi = pcol[id] or ptbs[id] if jpi then - texprint(format('(%s+%s-%s,%s+%s-%s)',jpi[2],jpi[4],dx,jpi[3],jpi[5],dy)) + texprint(ctxcatcodes,format('(%s+%s-%s,%s+%s-%s)',jpi[2],jpi[4],dx,jpi[3],jpi[5],dy)) +--~ texprint(ctxcatcodes,'(',jpi[2],'+',jpi[4],'-',dx,',',jpi[3],'+',jpi[5],'-',dy,')') else - texprint('(0,0)') + texprint(ctxcatcodes,'(0,0)') end end + function jobpositions.MPul(id) local jpi = pcol[id] or ptbs[id] if jpi then - texprint(format('(%s-%s,%s+%s-%s)',jpi[2],dx,jpi[3],jpi[5],dy)) + texprint(ctxcatcodes,format('(%s-%s,%s+%s-%s)',jpi[2],dx,jpi[3],jpi[5],dy)) +--~ texprint(ctxcatcodes,'(',jpi[2],'-',dx,',',jpi[3],'+',jpi[5],'-',dy,')') else - texprint('(0,0)') + texprint(ctxcatcodes,'(0,0)') end end + function jobpositions.MPpos(id) local jpi = pcol[id] or ptbs[id] if jpi then - texprint(concat(jpi,',',1,6)) + texprint(ctxcatcodes,concat(jpi,',',1,6)) else - texprint('0,0,0,0,0,0') + texprint(ctxcatcodes,'0,0,0,0,0,0') end end + +local splitter = lpeg.Ct(lpeg.splitat(",")) + function jobpositions.MPplus(id,n,default) local jpi = pcol[id] or ptbs[id] - texprint((jpi and jpi[6+n]) or default) + if not jpi then + texprint(ctxcatcodes,default) + else + local split = jpi[0] + if not split then + split = lpegmatch(splitter,jpi[7]) + jpi[0] = split + end + texprint(ctxcatcodes,split[n] or default) + end end + function jobpositions.MPrest(id,default) local jpi = pcol[id] or ptbs[id] - texprint((jpi and jpi[7] and concat(jpi,",",7,#jpi)) or default) + -- texprint(ctxcatcodes,(jpi and jpi[7] and concat(jpi,",",7,#jpi)) or default) + texprint(ctxcatcodes,(jpi and jpi[7]) or default) end diff --git a/Master/texmf-dist/tex/context/base/anch-pos.mkiv b/Master/texmf-dist/tex/context/base/anch-pos.mkiv index 0ddda9af40f..e454747daf2 100644 --- a/Master/texmf-dist/tex/context/base/anch-pos.mkiv +++ b/Master/texmf-dist/tex/context/base/anch-pos.mkiv @@ -205,18 +205,18 @@ % \the\dimexpr\noexpand\lastsavedpositionx\ifnum\positionanchormode=\plusone-\MPx\pageanchor\fi\relax % \the\dimexpr\noexpand\lastsavedpositiony\ifnum\positionanchormode=\plusone-\MPx\pageanchor\fi\relax -\def\MPp #1{\ctxlua{jobpositions.MPp("#1")}} -\def\MPx #1{\ctxlua{jobpositions.MPx("#1")}} -\def\MPy #1{\ctxlua{jobpositions.MPy("#1")}} 
-\def\MPw #1{\ctxlua{jobpositions.MPw("#1")}} -\def\MPh #1{\ctxlua{jobpositions.MPh("#1")}} -\def\MPd #1{\ctxlua{jobpositions.MPd("#1")}} -\def\MPxy #1{\ctxlua{jobpositions.MPxy("#1")}} -\def\MPll #1{\ctxlua{jobpositions.MPll("#1")}} -\def\MPlr #1{\ctxlua{jobpositions.MPlr("#1")}} -\def\MPur #1{\ctxlua{jobpositions.MPur("#1")}} -\def\MPul #1{\ctxlua{jobpositions.MPul("#1")}} -\def\MPpos #1{\ctxlua{jobpositions.MPpos("#1")}} +\def\MPp #1{\ctxlua{jobpositions.MPp("#1")}} +\def\MPx #1{\ctxlua{jobpositions.MPx("#1")}} +\def\MPy #1{\ctxlua{jobpositions.MPy("#1")}} +\def\MPw #1{\ctxlua{jobpositions.MPw("#1")}} +\def\MPh #1{\ctxlua{jobpositions.MPh("#1")}} +\def\MPd #1{\ctxlua{jobpositions.MPd("#1")}} +\def\MPxy #1{\ctxlua{jobpositions.MPxy("#1")}} +\def\MPll #1{\ctxlua{jobpositions.MPll("#1")}} +\def\MPlr #1{\ctxlua{jobpositions.MPlr("#1")}} +\def\MPur #1{\ctxlua{jobpositions.MPur("#1")}} +\def\MPul #1{\ctxlua{jobpositions.MPul("#1")}} +\def\MPpos#1{\ctxlua{jobpositions.MPpos("#1")}} %D \macros %D {MPplus, MPrest, MPv, MPvv} diff --git a/Master/texmf-dist/tex/context/base/anch-snc.mkiv b/Master/texmf-dist/tex/context/base/anch-snc.mkiv index 1f361011102..f976047c9b9 100644 --- a/Master/texmf-dist/tex/context/base/anch-snc.mkiv +++ b/Master/texmf-dist/tex/context/base/anch-snc.mkiv @@ -22,7 +22,7 @@ \ifx\s!preset \undefined \def\s!preset {preset} \fi \ifx\s!syncpos\undefined \def\s!syncpos{syncpos} \fi -\def\definesyncpositions[#1]% +\unexpanded\def\definesyncpositions[#1]% {\global\let\flushsyncpositions\doflushsyncpositions % only when used \setcounter{\s!num:\s!syncpos:#1}{0}% \doglobal\appendtoksonce\csname\s!reset :\s!syncpos:#1\endcsname\to\resetsyncpositions diff --git a/Master/texmf-dist/tex/context/base/attr-ini.lua b/Master/texmf-dist/tex/context/base/attr-ini.lua index 535488c35b1..81c2f4744ca 100644 --- a/Master/texmf-dist/tex/context/base/attr-ini.lua +++ b/Master/texmf-dist/tex/context/base/attr-ini.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['attr-ini'] = { version = 1.001, - comment = "companion to attr-ini.tex", + comment = "companion to attr-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -15,8 +15,10 @@ local concat = table.concat local texsprint = tex.sprint local ctxcatcodes = tex.ctxcatcodes +local unsetvalue = attributes.unsetvalue --- todo: document this +-- todo: document this but first reimplement this as it reflects the early +-- days of luatex / mkiv and we have better ways now -- nb: attributes: color etc is much slower than normal (marks + literals) but ... -- nb. 
too many "0 g"s @@ -60,7 +62,6 @@ colors.data = colors.data or { } colors.values = colors.values or { } colors.registered = colors.registered or { } -colors.enabled = true colors.weightgray = true colors.attribute = attributes.private('color') colors.selector = attributes.private('colormodel') @@ -79,10 +80,13 @@ local templates = { } local models = { - all = 1, - gray = 2, - rgb = 3, - cmyk = 4, + [interfaces.variables.none] = unsetvalue, + black = unsetvalue, + bw = unsetvalue, + all = 1, + gray = 2, + rgb = 3, + cmyk = 4, } colors.model = "all" @@ -94,8 +98,7 @@ local registered = colors.registered local numbers = attributes.numbers local list = attributes.list -local min = math.min -local max = math.max +local min, max, floor = math.min, math.max, math.floor local nodeinjections = backends.nodeinjections local codeinjections = backends.codeinjections @@ -121,10 +124,77 @@ local function cmyktogray(c,m,y,k) return rgbtogray(cmyktorgb(c,m,y,k)) end +-- http://en.wikipedia.org/wiki/HSI_color_space +-- http://nl.wikipedia.org/wiki/HSV_(kleurruimte) + + +local function hsvtorgb(h,s,v) + -- h = h % 360 + local hd = h/60 + local hf = floor(hd) + local hi = hf % 6 + -- local f = hd - hi + local f = hd - hf + local p = v * (1 - s) + local q = v * (1 - f * s) + local t = v * (1 - (1 - f) * s) + if hi == 0 then + return v, t, p + elseif hi == 1 then + return q, v, p + elseif hi == 2 then + return p, v, t + elseif hi == 3 then + return p, q, v + elseif hi == 4 then + return t, p, v + elseif hi == 5 then + return v, p, q + else + print("error in hsv -> rgb",hi,h,s,v) + end +end + +function rgbtohsv(r,g,b) + local offset, maximum, other_1, other_2 + if r >= g and r >= b then + offset, maximum, other_1, other_2 = 0, r, g, b + elseif g >= r and g >= b then + offset, maximum, other_1, other_2 = 2, g, b, r + else + offset, maximum, other_1, other_2 = 4, b, r, g + end + if maximum == 0 then + return 0, 0, 0 + end + local minimum = other_1 < other_2 and other_1 or other_2 + if maximum == minimum then + return 0, 0, maximum + end + local delta = maximum - minimum + return (offset + (other_1-other_2)/delta)*60, delta/maximum, maximum +end + +function graytorgb(s) -- unweighted + return 1-s, 1-s, 1-s +end + +function hsvtogray(h,s,v) + return rgb_to_gray(hsv_to_rgb(h,s,v)) +end + +function grayto_hsv(s) + return 0, 0, s +end + colors.rgbtocmyk = rgbtocmyk colors.rgbtogray = rgbtogray colors.cmyktorgb = cmyktorgb colors.cmyktogray = cmyktogray +colors.rgbtohsv = rgbtohsv +colors.hsvtorgb = hsvtorgb +colors.hsvtogray = hsvtogray +colors.graytohsv = graytohsv -- we can share some *data by using s, rgb and cmyk hashes, but -- normally the amount of colors is not that large; storing the @@ -192,16 +262,20 @@ local function reviver(data,n) d = { gray, gray, gray, gray } logs.report("attributes","unable to revive color %s",n or "?") else - local kind, gray, rgb, cmyk = v[1], graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) + local kind = v[1] if kind == 2 then + local gray= graycolor(v[2]) d = { gray, gray, gray, gray } elseif kind == 3 then + local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) d = { rgb, gray, rgb, cmyk } elseif kind == 4 then + local gray, rgb, cmyk = graycolor(v[2]), rgbcolor(v[3],v[4],v[5]), cmykcolor(v[6],v[7],v[8],v[9]) d = { cmyk, gray, rgb, cmyk } elseif kind == 5 then local spot = spotcolor(v[10],v[11],v[12],v[13]) - d = { spot, gray, rgb, cmyk } + -- d = { spot, gray, rgb, cmyk } + d = { spot, spot, spot, spot } end 
end data[n] = d @@ -226,7 +300,7 @@ function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but local stamp = format(templates[colorspace],...) local color = registered[stamp] if not color then - color = #values+1 + color = #values + 1 values[color] = colors[colorspace](...) registered[stamp] = color -- colors.reviver(color) @@ -250,13 +324,16 @@ shipouts.handle_color = nodes.install_attribute_handler { resolver = function() return colors.main end, } +function colors.enable() + tasks.enableaction("shipouts","shipouts.handle_color") +end + -- transparencies transparencies = transparencies or { } transparencies.registered = transparencies.registered or { } transparencies.data = transparencies.data or { } transparencies.values = transparencies.values or { } -transparencies.enabled = false transparencies.triggering = true transparencies.attribute = attributes.private('transparency') @@ -336,24 +413,27 @@ shipouts.handle_transparency = nodes.install_attribute_handler { processor = states.process, } ---- overprint / knockout +function transparencies.enable() + tasks.enableaction("shipouts","shipouts.handle_transparency") +end + +--- colorintents: overprint / knockout -overprints = overprints or { } -overprints.data = overprints.data or { } -overprints.enabled = false -overprints.attribute = attributes.private('overprint') +colorintents = colorintents or { } +colorintents.data = colorintents.data or { } +colorintents.attribute = attributes.private('colorintent') -overprints.registered = { +colorintents.registered = { overprint = 1, knockout = 2, } -local data, registered = overprints.data, overprints.registered +local data, registered = colorintents.data, colorintents.registered -local function extender(overprints,key) +local function extender(colorintents,key) if key == "none" then local d = data[2] - overprints.none = d + colorintents.none = d return d end end @@ -370,26 +450,29 @@ local function reviver(data,n) end end -setmetatable(overprints, { __index = extender }) -setmetatable(overprints.data, { __index = reviver }) +setmetatable(colorintents, { __index = extender }) +setmetatable(colorintents.data, { __index = reviver }) -function overprints.register(stamp) +function colorintents.register(stamp) return registered[stamp] or registered.overprint end -shipouts.handle_overprint = nodes.install_attribute_handler { - name = "overprint", - namespace = overprints, +shipouts.handle_colorintent = nodes.install_attribute_handler { + name = "colorintent", + namespace = colorintents, initializer = states.initialize, finalizer = states.finalize, processor = states.process, } +function colorintents.enable() + tasks.enableaction("shipouts","shipouts.handle_colorintent") +end + --- negative / positive negatives = negatives or { } negatives.data = negatives.data or { } -negatives.enabled = false negatives.attribute = attributes.private("negative") negatives.registered = { @@ -434,13 +517,16 @@ shipouts.handle_negative = nodes.install_attribute_handler { processor = states.process, } +function negatives.enable() + tasks.enableaction("shipouts","shipouts.handle_negative") +end + -- effects -- can be optimized (todo: metatables) effects = effects or { } effects.data = effects.data or { } effects.values = effects.values or { } effects.registered = effects.registered or { } -effects.enabled = false effects.stamp = "%s:%s:%s" effects.attribute = attributes.private("effect") @@ -463,7 +549,7 @@ end local function reviver(data,n) local e = values[n] -- we could nil values[n] now but hardly 
needed - local d = effect(v[1],v[2],v[3]) + local d = effect(e[1],e[2],e[3]) data[n] = d return d end @@ -490,6 +576,10 @@ shipouts.handle_effect = nodes.install_attribute_handler { processor = states.process, } +function effects.enable() + tasks.enableaction("shipouts","shipouts.handle_effect") +end + -- layers (ugly code, due to no grouping and such); currently we use exclusive layers -- but when we need it stacked layers might show up too; the next function based -- approach can be replaced by static (metatable driven) resolvers @@ -499,7 +589,6 @@ viewerlayers.data = viewerlayers.data or { } viewerlayers.registered = viewerlayers.registered or { } viewerlayers.values = viewerlayers.values or { } viewerlayers.listwise = viewerlayers.listwise or { } -viewerlayers.enabled = false viewerlayers.attribute = attributes.private("viewerlayer") storage.register("viewerlayers/registered", viewerlayers.registered, "viewerlayers.registered") @@ -553,3 +642,7 @@ shipouts.handle_viewerlayer = nodes.install_attribute_handler { finalizer = states.finalize, processor = states.stacked, } + +function viewerlayers.enable() + tasks.enableaction("shipouts","shipouts.handle_viewerlayer") +end diff --git a/Master/texmf-dist/tex/context/base/attr-ini.mkiv b/Master/texmf-dist/tex/context/base/attr-ini.mkiv index b90d4976b00..87d06c48a44 100644 --- a/Master/texmf-dist/tex/context/base/attr-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/attr-ini.mkiv @@ -20,6 +20,21 @@ \registerctxluafile{attr-ini}{1.001} +%D This might move: + +\def\pushattribute#1% + {\global\advance\csname\??ae:\string#1\endcsname\plusone + \global\expandafter\mathchardef\csname\??ae:\string#1:\number\csname\??ae:\string#1\endcsname\endcsname\attribute#1} + +\def\popattribute#1% + {\attribute#1\csname\??ae:\string#1:\number\csname\??ae:\string#1\endcsname\endcsname + \global\advance\csname\??ae:\string#1\endcsname\minusone} + +\def\installattributestack#1% + {\expandafter\newcount\csname\??ae:\string#1\endcsname} + +%D For the moment we put this here (later it will move to where it's used): + \definesystemattribute[state] \definesystemattribute[skip] \definesystemattribute[penalty] @@ -27,14 +42,16 @@ \definesystemattribute[color] \chardef\colorattribute \dogetattributeid{color} \definesystemattribute[transparency] \chardef\transparencyattribute \dogetattributeid{transparency} \definesystemattribute[background] \chardef\backgroundattribute \dogetattributeid{background} -\definesystemattribute[overprint] -\definesystemattribute[negative] -\definesystemattribute[effect] +\definesystemattribute[colorintent] \chardef\colorintentattribute \dogetattributeid{colorintent} +\definesystemattribute[negative] \chardef\negativeattribute \dogetattributeid{negative} +\definesystemattribute[effect] \chardef\effectattribute \dogetattributeid{effect} \definesystemattribute[viewerlayer] \chardef\viewerlayerattribute \dogetattributeid{viewerlayer} \definesystemattribute[layoutcomponent] \chardef\layoutcomponentattribute\dogetattributeid{layoutcomponent} \definesystemattribute[reference] \chardef\referenceattribute \dogetattributeid{reference} \definesystemattribute[destination] \chardef\destinationattribute \dogetattributeid{destination} \definesystemattribute[graphicvadjust] \chardef\graphicvadjustattribute \dogetattributeid{graphicvadjust} +\definesystemattribute[ruled] \chardef\ruledattribute \dogetattributeid{ruled} +\definesystemattribute[shifted] \chardef\shiftedattribute \dogetattributeid{shifted} % \definesystemattribute[ignore] % @@ -53,14 +70,14 @@ 
\newcount\currentcolormodel -\def\setcolormodel#1% +\def\dosetcolormodel#1% {\currentcolormodel\ctxlua{tex.print(colors.setmodel('#1'))}% - \dosetattribute{colormodel}{\the\currentcolormodel}} + \attribute\colormodelattribute\currentcolormodel} -\setcolormodel{all} +\dosetcolormodel{all} \appendtoks - \setcolormodel{all}% redundant? + \dosetcolormodel{all}% redundant? \to \everyjob \def\registerrgbcolor #1#2#3#4{\ctxlua{colors.register('#1','rgb' ,#2,#3,#4)}} @@ -70,12 +87,12 @@ % transparency \def\registertransparency#1#2#3% - {\setevalue{(ts:#1)}{\dosetattribute{transparency}{\ctxlua{tex.print(transparencies.register(#2,#3))}}}} + {\setevalue{(ts:#1)}{\attribute\transparencyattribute\ctxlua{tex.write(transparencies.register(#2,#3))} }} \def\sometransparencyswitch#1{\csname(ts:#1)\endcsname} \def\sometransparencyswitch - {\ctxlua{transparencies.enabled=true}% + {\ctxlua{transparencies.enable()}% \gdef\sometransparencyswitch##1{\csname(ts:##1)\endcsname}% \sometransparencyswitch} @@ -84,24 +101,28 @@ % overprint -\def\registeroverprint#1#2% - {\setevalue{(os:#1)}{\dosetattribute{overprint}{\ctxlua{tex.print(overprints.register('#2'))}}}} +\def\registercolorintent#1#2% + {\setevalue{(os:#1)}{\attribute\colorintentattribute\ctxlua{tex.write(colorintents.register('#2'))} }} -\def\dotriggeroverprint - {\ctxlua{overprints.enabled=true}% - \gdef\dotriggeroverprint##1{\csname(os:##1)\endcsname}% - \dotriggeroverprint} +\def\dotriggercolorintent + {\ctxlua{colorintents.enable()}% + \gdef\dotriggercolorintent##1{\csname(os:##1)\endcsname}% + \dotriggercolorintent} -\registeroverprint{knockout} {knockout} -\registeroverprint{overprint}{overprint} +\registercolorintent{knockout} {knockout} +\registercolorintent{overprint}{overprint} + +\installattributestack\colorintentattribute + +\setevalue{(os:#\v!none}{\attribute\colorintentattribute\attributeunsetvalue} % does this work out ok? % negative \def\registernegative#1#2% - {\setevalue{(ns:#1)}{\dosetattribute{negative}{\ctxlua{tex.print(negatives.register('#2'))}}}} + {\setevalue{(ns:#1)}{\attribute\negativeattribute\ctxlua{tex.write(negatives.register('#2'))} }} \def\dotriggernegative - {\ctxlua{negatives.enabled=true}% + {\ctxlua{negatives.enable()}% \gdef\dotriggernegative##1{\csname(ns:##1)\endcsname}% \dotriggernegative} @@ -112,10 +133,10 @@ \def\registereffect#1#2#3% #2=stretch #3=rulethickness {\setxvalue{(es:#1:#2:\number\dimexpr#3\relax)}% - {\dosetattribute{effect}{\ctxlua{tex.print(effects.register('#1',#2,\number\dimexpr#3\relax))}}}} + {\attribute\effectattribute\ctxlua{tex.write(effects.register('#1',#2,\number\dimexpr#3\relax))} }} \def\dotriggereffect - {\ctxlua{effects.enabled=true}% + {\ctxlua{effects.enable()}% \gdef\dotriggereffect##1##2##3% {\ifcsname(es:##1:##2:\number\dimexpr##3\relax)\endcsname\else\registereffect{##1}{##2}{##3}\fi \csname(es:##1:##2:\number\dimexpr##3\relax)\endcsname}% @@ -129,20 +150,15 @@ % viewerlayers (will probably change a bit) -% \def\registerviewerlayer#1#2% global ! -% {\setxvalue{(vl:#1)}{\dosetattribute{viewerlayer}{\ctxlua{tex.print(viewerlayers.register('#2'))}}}} -% -% \setevalue{(vl:)}{\doresetattribute{viewerlayer}} -% % needs to work over stopitemize grouping etc \def\registerviewerlayer#1#2% global ! 
- {\setxvalue{(vl:#1)}{\global\dosetattribute{viewerlayer}{\ctxlua{tex.print(viewerlayers.register('#2'))}}}} + {\setxvalue{(vl:#1)}{\global\attribute\viewerlayerattribute\ctxlua{tex.write(viewerlayers.register('#2'))} }} -\setevalue{(vl:)}{\global\doresetattribute{viewerlayer}} +\setevalue{(vl:)}{\global\attribute\viewerlayerattribute\attributeunsetvalue} \def\dotriggerviewerlayer - {\ctxlua{viewerlayers.enabled=true}% + {\ctxlua{viewerlayers.enable()}% \gdef\dotriggerviewerlayer##1{\csname(vl:##1)\endcsname}% \dotriggerviewerlayer} diff --git a/Master/texmf-dist/tex/context/base/back-ini.lua b/Master/texmf-dist/tex/context/base/back-ini.lua index bad6b0282d5..12a487dd4dc 100644 --- a/Master/texmf-dist/tex/context/base/back-ini.lua +++ b/Master/texmf-dist/tex/context/base/back-ini.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['back-ini'] = { version = 1.001, - comment = "companion to back-ini.tex", + comment = "companion to back-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -46,6 +46,7 @@ backends.codeinjections = { registeredsymbol = nothing, registercomment = nothing, + embedfile = nothing, attachfile = nothing, adddocumentinfo = nothing, setupidentity = nothing, @@ -55,7 +56,6 @@ backends.codeinjections = { addtransparencygroup = nothing, typesetfield = nothing, - finishfields = nothing, doiffieldelse = nothing, doiffieldgroupelse = nothing, definefield = nothing, diff --git a/Master/texmf-dist/tex/context/base/back-ini.mkiv b/Master/texmf-dist/tex/context/base/back-ini.mkiv index 07e9147f259..b7bbdb56f01 100644 --- a/Master/texmf-dist/tex/context/base/back-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/back-ini.mkiv @@ -163,6 +163,6 @@ \edef\jobsuffix{#1}% \setsystemmode\jobsuffix} -\def\setupoutput[#1]{} % will be command line switch +\unexpanded\def\setupoutput[#1]{} % will be command line switch \protect \endinput diff --git a/Master/texmf-dist/tex/context/base/back-pdf.lua b/Master/texmf-dist/tex/context/base/back-pdf.lua index 908801d8a95..54e22f1a2bd 100644 --- a/Master/texmf-dist/tex/context/base/back-pdf.lua +++ b/Master/texmf-dist/tex/context/base/back-pdf.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['back-pdf'] = { version = 1.001, - comment = "companion to back-pdf.tex", + comment = "companion to back-pdf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -21,8 +21,7 @@ local concat = table.concat local round = math.round local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues local texsprint, texwrite = tex.sprint, tex.write - -ctxcatcodes = tex.ctxcatcodes +local ctxcatcodes = tex.ctxcatcodes local copy_node = node.copy @@ -32,14 +31,18 @@ local registrations = backends.pdf.registrations local pdfliteral, register = nodes.pdfliteral, nodes.register -local pdfconstant = lpdf.constant -local pdfdictionary = lpdf.dictionary -local pdfarray = lpdf.array -local pdfreference = lpdf.reference -local pdfverbose = lpdf.verbose +local pdfconstant = lpdf.constant +local pdfstring = lpdf.string +local pdfdictionary = lpdf.dictionary +local pdfarray = lpdf.array +local pdfreference = lpdf.reference +local pdfverbose = lpdf.verbose +local pdfflushobject = lpdf.flushobject +local pdfreserveobject = lpdf.reserveobject +local pdfannotation = nodes.pdfannotation -local pdfreserveobj = pdf.reserveobj -local 
pdfimmediateobj = pdf.immediateobj +local pdfreserveobj = pdf.reserveobj +local pdfimmediateobj = pdf.immediateobj function nodeinjections.rgbcolor(r,g,b) return register(pdfliteral(format("%s %s %s rg %s %s %s RG",r,g,b,r,g,b))) @@ -55,7 +58,7 @@ end function nodeinjections.spotcolor(n,f,d,p) if type(p) == "string" then - p = p:gsub(","," ") -- brr misuse of spot + p = gsub(p,","," ") -- brr misuse of spot end return register(pdfliteral(format("/%s cs /%s CS %s SCN %s scn",n,n,p,p))) end @@ -72,9 +75,9 @@ local effects = { hidden = 3, } -function nodeinjections.effect(stretch,rulethickness,effect) +function nodeinjections.effect(effect,stretch,rulethickness) -- always, no zero test (removed) - rulethickness = number.dimenfactors["bp"]*rulethickness + rulethickness = number.dimenfactors["bp"] * rulethickness effect = effects[effect] or effects['normal'] return register(pdfliteral(format("%s Tc %s w %s Tr",stretch,rulethickness,effect))) -- watch order end @@ -131,7 +134,7 @@ function codeinjections.insertmovie(specification) Movie = moviedict, A = controldict, } - node.write(nodes.pdfannot(width,height,0,action())) + node.write(pdfannotation(width,height,0,action())) end function codeinjections.insertsound(specification) @@ -151,7 +154,7 @@ function codeinjections.insertsound(specification) Movie = sounddict, A = controldict, } - node.write(nodes.pdfannot(0,0,0,action())) + node.write(pdfannotation(0,0,0,action())) end end @@ -162,7 +165,7 @@ local pdf_indexed = pdfconstant("Indexed") local pdf_device_n = pdfconstant("DeviceN") local pdf_device_rgb = pdfconstant("DeviceRGB") local pdf_device_cmyk = pdfconstant("DeviceCMYK") -local pdf_device_gray = pdfconstant("Devicegray") +local pdf_device_gray = pdfconstant("DeviceGray") local pdf_extgstate = pdfconstant("ExtGState") local pdf_rbg_range = pdfarray { 0, 1, 0, 1, 0, 1 } @@ -303,6 +306,7 @@ local function indexcolorref(name) -- actually, names (parent) is the hash end function registrations.rgbspotcolor(name,noffractions,names,p,r,g,b) +--~ print(name,noffractions,names,p,r,g,b) if noffractions == 1 then registersomespotcolor(name,noffractions,names,p,pdf_device_rgb,pdf_rbg_range,format(rgb_function,r,g,b)) else diff --git a/Master/texmf-dist/tex/context/base/back-pdf.mkiv b/Master/texmf-dist/tex/context/base/back-pdf.mkiv index 2d3ba85248b..a10afd5b901 100644 --- a/Master/texmf-dist/tex/context/base/back-pdf.mkiv +++ b/Master/texmf-dist/tex/context/base/back-pdf.mkiv @@ -29,7 +29,7 @@ \pdfpkresolution = 600 \pdfdecimaldigits = 10 \pdfinclusionerrorlevel = 0 -\pdfminorversion = 5 +\pdfminorversion = 6 % maybe even 7 %pdfuniqueresname = 1 %D This one can be consulted by users although the suffix is also @@ -44,13 +44,13 @@ %D These are the only official methods to add stuff to the resources. 
-\def\pdfbackendsetcatalog #1#2{\ctxlua{lpdf.addtocatalog ("#1",\!!bs#2\!!es)}} \newtoks\pdfcatalog -\def\pdfbackendsetinfo #1#2{\ctxlua{lpdf.addtoinfo ("#1",\!!bs#2\!!es)}} \newtoks\pdfinfo -\def\pdfbackendsetname #1#2{\ctxlua{lpdf.addtonames ("#1",\!!bs#2\!!es)}} \newtoks\pdfnames +\def\pdfbackendsetcatalog #1#2{\ctxlua{lpdf.addtocatalog ("#1",\!!bs#2\!!es)}} +\def\pdfbackendsetinfo #1#2{\ctxlua{lpdf.addtoinfo ("#1",\!!bs#2\!!es)}} +\def\pdfbackendsetname #1#2{\ctxlua{lpdf.addtonames ("#1",\!!bs#2\!!es)}} -\def\pdfbackendsetpageattribute #1#2{\ctxlua{lpdf.addtopageattributes ("#1",\!!bs#2\!!es)}} \newtoks\pdfpageresources -\def\pdfbackendsetpagesattribute#1#2{\ctxlua{lpdf.addtopagesattributes("#1",\!!bs#2\!!es)}} \newtoks\pdfpageattr -\def\pdfbackendsetpageresource #1#2{\ctxlua{lpdf.addtopageresources ("#1",\!!bs#2\!!es)}} \newtoks\pdfpagesattr +\def\pdfbackendsetpageattribute #1#2{\ctxlua{lpdf.addtopageattributes ("#1",\!!bs#2\!!es)}} +\def\pdfbackendsetpagesattribute#1#2{\ctxlua{lpdf.addtopagesattributes("#1",\!!bs#2\!!es)}} +\def\pdfbackendsetpageresource #1#2{\ctxlua{lpdf.addtopageresources ("#1",\!!bs#2\!!es)}} \def\pdfbackendsetextgstate #1#2{\ctxlua{lpdf.adddocumentextgstate ("#1",lpdf.verbose(\!!bs#2\!!es))}} \def\pdfbackendsetcolorspace #1#2{\ctxlua{lpdf.adddocumentcolorspace("#1",lpdf.verbose(\!!bs#2\!!es))}} @@ -59,27 +59,33 @@ \def\pdfbackendcurrentresources {\ctxlua{lpdf.collectedresources()}} +%D Let's block these: + +\let\pdfcatalog \relax \newtoks\pdfcatalog +\let\pdfinfo \relax \newtoks\pdfinfo +\let\pdfnames \relax \newtoks\pdfnames +\let\pdfpageresources\relax \newtoks\pdfpageresources +\let\pdfpageattr \relax \newtoks\pdfpageattr +\let\pdfpagesattr \relax \newtoks\pdfpagesattr + %D An example of usage is: -\appendtoks % will change ... - \pdfbackendsetinfo{ConTeXt.Version}{(\contextversion)}% - \pdfbackendsetinfo{ConTeXt.Time} {(\number\normalyear.\twodigits\normalmonth.\twodigits\normalday\space \twodigits\currenthour:\twodigits\currentminute)}% - \pdfbackendsetinfo{ConTeXt.Jobname}{(\jobname)}% - \pdfbackendsetinfo{ConTeXt.Url} {(www.pragma-ade.com)}% +\appendtoks + \pdfbackendsetinfo{ConTeXt.Version}{\contextversion}% + \pdfbackendsetinfo{ConTeXt.Time} {\number\normalyear.\twodigits\normalmonth.\twodigits\normalday\space \twodigits\currenthour:\twodigits\currentminute}% + \pdfbackendsetinfo{ConTeXt.Jobname}{\jobname}% + \pdfbackendsetinfo{ConTeXt.Url} {www.pragma-ade.com}% \to \everylastbackendshipout %D Unfortunately this is still needed (also for \METAPOST\ to %D \PDF\ converter): -\def\doresetmapfilelist - {\global\let\doresetmapfilelist\relax - \pdfmapfile{original-empty.map}} - -\def\doloadmapfile #1#2{\pdfmapfile{#1#2}} -\def\doloadmapline #1#2{\pdfmapline{#1#2}} - -\appendtoksonce \loadallfontmapfiles \to \pdfbackendeveryxform -\appendtoksonce \loadallfontmapfiles \to \pdfbackendeveryximage +% \def\doresetmapfilelist +% {\global\let\doresetmapfilelist\relax +% \pdfmapfile{original-empty.map}} +% +% \appendtoksonce \loadallfontmapfiles \to \pdfbackendeveryxform +% \appendtoksonce \loadallfontmapfiles \to \pdfbackendeveryximage %D Transformations. Some day we will use primitives (once they're fixed). @@ -101,7 +107,7 @@ \def\dostartmirroring{\pdfliteral{-1 0 0 1 0 0 cm}} \def\dostopmirroring {\pdfliteral{-1 0 0 1 0 0 cm}} -\def\dostartclipping#1#2#3% todo +\def\dostartclipping#1#2#3% todo (still needed?) 
{\PointsToBigPoints{#2}\width \PointsToBigPoints{#3}\height \grabMPclippath{#1}{1}\width\height{0 0 m \width\space 0 l \width \height l 0 \height l}% @@ -115,7 +121,7 @@ \appendtoks \ctxlua{backends.codeinjections.finalizepage ()}\to \everybackendshipout % is immediate \appendtoks \ctxlua{backends.codeinjections.finalizedocument()}\to \everylastbackendshipout % is immediate -%D Temporary hack, will be removed or improved. +%D Temporary hack, will be removed or improved or default. \def\TransparencyHack{\ctxlua{backends.codeinjections.addtransparencygroup()}} @@ -159,8 +165,11 @@ {\dogetobjectreferencepage{#1}{#2}#3% \doPDFgetpagereference{\ifx#3\empty\realfolio\else#3\fi}#3} +\let\lastpredefinedsymbol\empty % some day we can do more at the lua end + \def\predefinesymbol[#1]% {\begingroup + \xdef\lastpredefinedsymbol{#1}% \settightobject{SYM}{#1}\hbox{\symbol[#1]}% to be checked ... maybe only fitting \dogetobjectreference{SYM}{#1}\lastref \ctxlua{backends.codeinjections.registersymbol("#1",\lastref)}% diff --git a/Master/texmf-dist/tex/context/base/back-u3d.mkiv b/Master/texmf-dist/tex/context/base/back-u3d.mkiv new file mode 100644 index 00000000000..398159feb2c --- /dev/null +++ b/Master/texmf-dist/tex/context/base/back-u3d.mkiv @@ -0,0 +1,156 @@ +%D \module +%D [ file=back-u3d, +%D version=2009.04.15, +%D title=\CONTEXT\ Backend Macros, +%D subtitle=U3D Experiment, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=\PRAGMA] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +% This is only a placeholder that demonstrates the usage of u3d +% resources. The user interface is rather messy an might be +% improved. The files and setup is derived from an example by +% Michael Vidiassov. 
+ +\endinput + +\starttext + +\startluaparameterset [u3d:myset:controls:1] + view = { + name = 'default', + bg = {1,1,1}, + mag = 100, + coo = {0,0,0}, + c2c = {0,0,1}, + rot = {40,0,60}, + roo = 6, + lights = 'CAD' + }, + js = 'cloudq.js' +\stopluaparameterset + +\startluaparameterset [u3d:myset:controls:2] + views = { + { + name = 'AnglePositioning', + bg = {1,1,1}, + azimuth = 45, + altitude = 45, + roo = 50, + aac = 2.5, + lights = 'Artwork' + }, + { + name = 'RotationPositioning', + bg = {1,1,1}, + rot = {0,45,45}, + roo = 50, + aac = 2.5, + lights = 'Artwork' + }, + { + name = 'VectorPositioning', + bg = {1,0,0}, + c2c = {1,1,math.sqrt(2)}, + roo = 50, + aac = 2.5, + lights = 'CAD' + }, + { + name = 'PositionPositioning', + bg = {1,0,0}, + pos = {1+25,1+25,1+50/math.sqrt(2)}, + aac = 2.5, + lights = 'CAD' + }, + { + name = 'ortho', + bg = {1,1,1}, + mag = 300, + lights = 'CAD', + crossection = {} + } + }, + view = { + name = 'default', + bg = {1,1,1}, + c2c = {-1,-1,0}, + roo = 50, + aac = 2.5, + roll = 45, + lights = 'CAD', + crossection = { + normal = {-1,-1,-1}, + transparent = true + }, + nodes = { + { + name = 'xlabel', + visible = false + }, + { + name = 'ylabel', + opacity = 0.5 + }, + { + name = 'zlabel', + rendermode = 'Wireframe' + } + } + } +\stopluaparameterset + +\useexternalfigure + [cloudq] + [cloudq.u3d] + [width=0.7\textwidth, + height=.7\textwidth, + display=u3d:myset:display:1, + controls=u3d:myset:controls:1] + +\useexternalfigure + [axes] + [axes.u3d] + [width=0.7\textwidth, + height=.7\textwidth, + controls=u3d:myset:controls:1] + +\startluaparameterset[u3d:myset:display:2] + toolbar = true, + preview = 'cloudq.png' +\stopluaparameterset +\startluaparameterset[u3d:myset:display:3] + toolbar = true, + tree = false, + preview = 'area.png' +\stopluaparameterset +\startluaparameterset[u3d:myset:display:4] + toolbar = true, + tree = false, + view = { + name = 'view', + bg = {0.1,0.1,0.1}, + c2c = {-1,-1,0}, + roo = 50, + aac = 2.5, + roll = 45, + lights = 'Red' + } +\stopluaparameterset +\startluaparameterset[u3d:myset:display:5] + toolbar = true, + tree = false, + view = 'ortho' +\stopluaparameterset + +\placefigure[here]{none}{\externalfigure[cloudq][display=u3d:myset:display:2]} +\placefigure[here]{none}{\externalfigure[axes] [display=u3d:myset:display:3]} +\placefigure[here]{none}{\externalfigure[axes] [display=u3d:myset:display:4]} +\placefigure[here]{none}{\externalfigure[axes] [display=u3d:myset:display:5,width=0.5\textwidth,height=.5\textwidth]} + +\stoptext diff --git a/Master/texmf-dist/tex/context/base/bibl-bib.lua b/Master/texmf-dist/tex/context/base/bibl-bib.lua index 028202ec22f..3c0dad2fa4e 100644 --- a/Master/texmf-dist/tex/context/base/bibl-bib.lua +++ b/Master/texmf-dist/tex/context/base/bibl-bib.lua @@ -12,8 +12,18 @@ bibtex files and converts them to xml so that the we access the content in a convenient way. Actually handling the data takes place elsewhere.

--ldx]]-- -local lower, format = string.lower, string.format +local utf = unicode.utf8 +local lower, format, gsub, concat = string.lower, string.format, string.gsub, table.concat local next = next +local utfchar = utf.char +local lpegmatch = lpeg.match +local textoutf = characters and characters.tex.toutf +local variables = interfaces and interfaces.variables + +local finalizers = xml.finalizers.tex +local xmlfilter, xmltext = xml.filter, xml.text + +local trace_bibxml = false trackers.register("publications.bibxml", function(v) trace_bibtex = v end) bibtex = bibtex or { } @@ -21,10 +31,29 @@ bibtex.size = 0 bibtex.definitions = 0 bibtex.shortcuts = 0 +local defaultshortcuts = { + jan = "1", + feb = "2", + mar = "3", + apr = "4", + may = "5", + jun = "6", + jul = "7", + aug = "8", + sep = "9", + oct = "10", + nov = "11", + dec = "12", +} + local shortcuts = { } -local data = { } +local data = { } local entries +-- Currently we expand shortcuts and for large ones (like the acknowledgements +-- in tugboat.bib this is not that efficient. However, eventually strings get +-- hashed again. + local function do_shortcut(tag,key,value) bibtex.shortcuts = bibtex.shortcuts + 1 if lower(tag) == "@string" then @@ -49,34 +78,33 @@ local function do_definition(tag,key,tab) -- maybe check entries here (saves mem end local function resolve(s) - return shortcuts[s] or "" -end - -local percent = lpeg.P("%") -local start = lpeg.P("@") -local comma = lpeg.P(",") -local hash = lpeg.P("#") -local escape = lpeg.P("\\") -local single = lpeg.P("'") -local double = lpeg.P('"') -local left = lpeg.P('{') -local right = lpeg.P('}') + return shortcuts[s] or defaultshortcuts[s] or s -- can be number +end + +local P, R, S, C, Cc, Cs, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct + +local percent = P("%") +local start = P("@") +local comma = P(",") +local hash = P("#") +local escape = P("\\") +local single = P("'") +local double = P('"') +local left = P('{') +local right = P('}') local both = left + right -local lineending = lpeg.S("\n\r") -local space = lpeg.S(" \t\n\r\f") +local lineending = S("\n\r") +local space = S(" \t\n\r\f") local spacing = space^0 -local equal = lpeg.P("=") +local equal = P("=") local collapsed = (space^1)/ " " local function add(a,b) if b then return a..b else return a end end -local keyword = lpeg.C((lpeg.R("az","AZ","09") + lpeg.S("@_:-"))^1) -- lpeg.C((1-space)^1) +local keyword = C((lpeg.R("az","AZ","09") + S("@_:-"))^1) -- lpeg.C((1-space)^1) local s_quoted = ((escape*single) + collapsed + (1-single))^0 local d_quoted = ((escape*double) + collapsed + (1-double))^0 -local balanced = lpeg.P { - [1] = ((escape * (left+right)) + (1 - (left+right)) + lpeg.V(2))^0, - [2] = left * lpeg.V(1) * right -} +local balanced = lpeg.patterns.balanced local s_value = (single/"") * s_quoted * (single/"") local d_value = (double/"") * d_quoted * (double/"") @@ -84,7 +112,7 @@ local b_value = (left /"") * balanced * (right /"") local r_value = keyword/resolve local somevalue = s_value + d_value + b_value + r_value -local value = lpeg.Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0)) +local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0)) local assignment = spacing * keyword * spacing * equal * spacing * value * spacing local shortcut = keyword * spacing * left * spacing * (assignment * comma^0)^0 * spacing * right @@ -101,14 +129,21 @@ function bibtex.convert(session,content) data, shortcuts, entries = session.data, session.shortcuts, session.entries -- 
session.size = session.size + #content bibtex.size = bibtex.size + #content - grammar:match(content or "") + session.size = session.size + #content + lpegmatch(grammar,content or "") statistics.stoptiming(bibtex) end function bibtex.load(session,filename) local filename = resolvers.find_file(filename,"bib") if filename ~= "" then - bibtex.convert(session,io.loaddata(filename) or "") + local data = io.loaddata(filename) or "" + if data == "" then + logs.report("publications","empty file '%s', no conversion to xml",filename) + elseif trace_bibxml then + logs.report("publications","converting file '%s' to xml",filename) + end + bibtex.convert(session,data) end end @@ -119,37 +154,88 @@ function bibtex.new() xml = xml.convert("\n"), size = 0, entries = nil, + loaded = false, } end local escaped_pattern = xml.escaped_pattern -function bibtex.toxml(session) +local ihatethis = { + f = "\\f", + n = "\\n", + r = "\\r", + s = "\\s", + t = "\\t", + v = "\\v", + z = "\\z", +} + +local command = P("\\")/"" * Cc("\\bibtexcommand{") * (R("az","AZ")^1) * Cc("}") +local any = P(1) +local done = P(-1) +local one_l = P("{") / "" +local one_r = P("}") / "" +local two_l = P("{{") / "" +local two_r = P("}}") / "" + +local filter = Cs( + two_l * (command + any - two_r - done)^0 * two_r * done + + one_l * (command + any - one_r - done)^0 * one_r * done + + (command + any )^0 +) + +function bibtex.toxml(session,options) + if session.loaded then + return + else + session.loaded = true + end -- we can always speed this up if needed -- format slows down things a bit but who cares statistics.starttiming(bibtex) local result = { } + local options = aux.settings_to_hash(options) + local convert = options.convert -- todo: interface + local strip = options.strip -- todo: interface local entries = session.entries result[#result+1] = format("") result[#result+1] = format("") for id, categories in next, session.data do - result[#result+1] = format(" ",id) + id = lower(gsub(id,"^@","")) for name, entry in next, categories do if not entries or entries[name] then - result[#result+1] = format(" ",name) + result[#result+1] = format("",lower(name),id) for key, value in next, entry do - value = escaped_pattern:match(value) + value = gsub(value,"\\(.)",ihatethis) + value = lpegmatch(escaped_pattern,value) + if value ~= "" then - result[#result+1] = format(" %s",key,value) + if convert then + value = textoutf(value,true) + end + if strip then + -- as there is no proper namespace in bibtex we need this + -- kind of hackery ... 
bibtex databases are quite unportable + value = lpegmatch(filter,value) or value + end + result[#result+1] = format(" %s",key,value) end end - result[#result+1] = format(" ") + result[#result+1] = format("") end end - result[#result+1] = format(" ") end result[#result+1] = format("") - session.xml = xml.convert(table.concat(result,"\n")) + result = concat(result,"\n") + -- alternatively we could use lxml.convert + session.xml = xml.convert(result, { + resolve_entities = true, + resolve_predefined_entities = true, -- in case we have escaped entities + -- unify_predefined_entities = true, -- & -> & + utfize_entities = true, + } ) + session.data = nil + session.shortcuts = nil statistics.stoptiming(bibtex) end @@ -204,30 +290,473 @@ end) --~ print(table.serialize(session.shortcuts)) --~ print(xml.serialize(session.xml)) --- this will move: +if not characters then dofile(resolvers.find_file("char-def.lua")) end + +local chardata = characters.data +local concat = table.concat + +local P, Ct, lpegmatch = lpeg.P, lpeg.Ct, lpeg.match + +local space, comma = P(" "), P(",") + +local andsplitter = Ct(lpeg.splitat(space^1 * "and" * space^1)) +local commasplitter = Ct(lpeg.splitat(space^0 * comma * space^0)) +local spacesplitter = Ct(lpeg.splitat(space^1)) +local firstcharacter = lpeg.patterns.utf8byte + +function is_upper(str) + local first = lpegmatch(firstcharacter,str) + local okay = chardata[first] + return okay and okay.category == "lu" +end + +local function splitauthors(str) + local authors = lpegmatch(andsplitter,str) + for i=1,#authors do + local firstnames, vons, surnames, initials, juniors, words + local author = authors[i] + local split = lpegmatch(commasplitter,author) + local n = #split + if n == 1 then + --~ First von Last + words = lpegmatch(spacesplitter,author) + firstnames, vons, surnames = { }, { }, { } + local i, n = 1, #words + while i <= n do + local w = words[i] + if is_upper(w) then + firstnames[#firstnames+1], i = w, i + 1 + else + break + end + end + while i <= n do + local w = words[i] + if is_upper(w) then + break + else + vons[#vons+1], i = w, i + 1 + end + end + while i <= n do + surnames[#surnames+1], i = words[i], i + 1 + end + elseif n == 2 then + --~ von Last, First + words = lpegmatch(spacesplitter,split[2]) + surnames = lpegmatch(spacesplitter,split[1]) + firstnames, vons = { }, { } + local i, n = 1, #words + while i <= n do + local w = words[i] + if is_upper(w) then + firstnames[#firstnames+1], i = w, i + 1 + else + break + end + end + while i <= n do + vons[#vons+1], i = words[i], i + 1 + end + else + --~ von Last, Jr ,First + firstnames = lpegmatch(spacesplitter,split[1]) + juniors = lpegmatch(spacesplitter,split[2]) + surnames = lpegmatch(spacesplitter,split[3]) + if n > 3 then + -- error + end + end + if #surnames == 0 then + surnames[1] = firstnames[#firstnames] + firstnames[#firstnames] = nil + end + if firstnames then + initials = { } + for i=1,#firstnames do + initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i])) + end + end + authors[i] = { + original = author, + firstnames = firstnames, + vons = vons, + surnames = surnames, + initials = initials, + juniors = juniors, + } + end + authors.original = str + return authors +end + +local function the_initials(initials,symbol) + local t, symbol = { }, symbol or "." + for i=1,#initials do + t[i] = initials[i] .. 
symbol + end + return t +end + +-- authors + +bibtex.authors = bibtex.authors or { } + +local authors = bibtex.authors + +local defaultsettings = { + firstnamesep = " ", + vonsep = " ", + surnamesep = " ", + juniorsep = " ", + surnamejuniorsep = ", ", + juniorjuniorsep = ", ", + surnamefirstnamesep = ", ", + surnameinitialsep = ", ", + namesep = ", ", + lastnamesep = " and ", + finalnamesep = " and ", +} + +function authors.normal(author,settings) + local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors + local result, settings = { }, settings or defaultsettings + if firstnames and #firstnames > 0 then + result[#result+1] = concat(firstnames," ") + result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep + end + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = concat(juniors," ") + result[#result+1] = settings.surnamesep or defaultsettings.surnamesep + end + return concat(result) +end + +function authors.normalshort(author,settings) + local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors + local result, settings = { }, settings or defaultsettings + if firstnames and #firstnames > 0 then + result[#result+1] = concat(firstnames," ") + result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep + end + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = concat(juniors," ") + result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep + end + return concat(result) +end + +function authors.inverted(author,settings) + local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors + local result, settings = { }, settings or defaultsettings + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = settings.juniorjuniorsep or defaultsettings.juniorjuniorsep + result[#result+1] = concat(juniors," ") + end + if firstnames and #firstnames > 0 then + result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep + result[#result+1] = concat(firstnames," ") + end + return concat(result) +end + +function authors.invertedshort(author,settings) + local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors + local result, settings = { }, settings or defaultsettings + if vons and #vons > 0 then + result[#result+1] = concat(vons," ") + result[#result+1] = settings.vonsep or defaultsettings.vonsep + end + if surnames then + result[#result+1] = concat(surnames," ") + end + if juniors and #juniors > 0 then + result[#result+1] = settings.juniorjuniorsep or defaultsettings.juniorjuniorsep + result[#result+1] = concat(juniors," ") + end + if initials and #initials > 0 then + result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep + result[#result+1] = concat(the_initials(initials)," ") + end + return 
concat(result) +end + +local lastconcatsize = 1 + +local function bibtexconcat(t,settings) + local namesep = settings.namesep or defaultsettings.namesep or ", " + local lastnamesep = settings.lastnamesep or defaultsettings.lastnamesep or namesep + local finalnamesep = settings.finalnamesep or defaultsettings.finalnamesep or lastnamesep + local lastconcatsize = #t + if lastconcatsize > 2 then + local s = { } + for i=1,lastconcatsize-2 do + s[i] = t[i] .. namesep + end + s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize] + return concat(s) + elseif lastconcatsize > 1 then + return concat(t,lastnamesep) + elseif lastconcatsize > 0 then + return t[1] + else + return "" + end +end + +function authors.concat(author,combiner,what,settings) + if type(combiner) == "string" then + combiner = authors[combiner or "normal"] or authors.normal + end + local split = splitauthors(author) + local setting = settings[what] + local etallimit, etaldisplay, etaltext = 1000, 1000, "" + if setting then + etallimit = settings.etallimit or 1000 + etaldisplay = settings.etaldisplay or etallimit + etalltext = settings.etaltext or "" + end + local max = #split + if max > etallimit and etaldisplay < max then + max = etaldisplay + end + for i=1,max do + split[i] = combiner(split[i],settings) + end + local result = bibtexconcat(split,settings) + if max < #split then + return result + else + return result .. etaltext + end +end + +function authors.short(author,year) + local result = { } + if author then + local authors = splitauthors(author) + for a=1,#authors do + local aa = authors[a] + local initials = aa.initials + for i=1,#initials do + result[#result+1] = initials[i] + end + local surnames = aa.surnames + for s=1,#surnames do + result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s])) + end + end + end + if year then + result[#result+1] = year + end + return concat(result) +end + +-- We can consider creating a hashtable key -> entry but I wonder if +-- pays off. + +local function collectauthoryears(id,list) + list = aux.settings_to_hash(list) + id = lxml.get_id(id) + local found = { } + for e in xml.collected(id,"/bibtex/entry") do + if list[e.at.tag] then + local year = xmlfilter(e,"xml:///field[@name='year']/text()") + local author = xmlfilter(e,"xml:///field[@name='author']/text()") + if author and year then + local a = found[author] + if not a then + a = { } + found[author] = a + end + local y = a[year] + if not y then + y = { } + a[year] = y + end + y[#y+1] = e + end + end + end + -- found = { author = { year_1 = { e1, e2, e3 } } } + local done = { } + for author, years in next, found do + local yrs = { } + for year, entries in next, years do + if subyears then + -- -- add letters to all entries of an author and if so shouldn't + -- -- we tag all years of an author as soon as we do this? + -- if #entries > 1 then + -- for i=1,#years do + -- local entry = years[i] + -- -- years[i] = year .. 
string.char(i + string.byte("0") - 1) + -- end + -- end + else + yrs[#yrs+1] = year + end + end + done[author] = yrs + end + return done +end + +local method, settings = "normal", { } + +function authors.setsettings(s) + settings = s or settings +end if commands then + local texsprint = tex and tex.sprint + local ctxcatcodes = tex and tex.ctxcatcodes + local sessions = { } function commands.definebibtexsession(name) sessions[name] = bibtex.new() end - function commands.preparebibtexsession(name) - bibtex.toxml(sessions[name]) - lxml.register("bibtex:"..name,sessions[name].xml) + + function commands.preparebibtexsession(name,xmlname,options) + bibtex.toxml(sessions[name],options) + lxml.register(xmlname,sessions[name].xml) end + function commands.registerbibtexfile(name,filename) bibtex.load(sessions[name],filename) end + function commands.registerbibtexentry(name,entry) local session = sessions[name] local entries = session.entries if not entries then - session.entries = { [entry] = true } + session.entries = { [entry] = true } -- here we can keep more info else entries[entry] = true end end + -- commands.bibtexconcat = bibtexconcat + + -- finalizers can be rather dumb as we have just text and no embedded xml + + function finalizers.bibtexconcat(collected,method,what) + if collected then + local author = collected[1].dt[1] or "" + if author ~= "" then + texsprint(ctxcatcodes,authors.concat(author,method,what,settings)) + end + end + end + + function finalizers.bibtexshort(collected) + if collected then + local c = collected[1] + local year = xmlfilter(c,"xml://field[@name='year']/text()") + local author = xmlfilter(c,"xml://field[@name='author']/text()") + texsprint(ctxcatcodes,authors.short(author,year)) + end + end + + -- experiment: + + --~ -- alternative approach: keep data at the tex end + + --~ local function xbibtexconcat(t,sep,finalsep,lastsep) + --~ local n = #t + --~ if n > 0 then + --~ context(t[1]) + --~ if n > 1 then + --~ if n > 2 then + --~ for i=2,n-1 do + --~ context.bibtexpublicationsparameter("sep") + --~ context(t[i]) + --~ end + --~ context.bibtexpublicationsparameter("finalsep") + --~ else + --~ context.bibtexpublicationsparameter("lastsep") + --~ end + --~ context(t[n]) + --~ end + --~ end + --~ end + + -- todo : sort + + -- todo: choose between bibtex or commands namespace + + function bibtex.authorref(id,list) + local result = collectauthoryears(id,list,method,what) + for author, years in next, result do + texsprint(ctxcatcodes,authors.concat(author,method,what,settings)) + end + end + + function bibtex.authoryearref(id,list) + local result = collectauthoryears(id,list,method,what) + for author, years in next, result do + texsprint(ctxcatcodes,authors.concat(author,method,what,settings)," (",concat(years,", "),")") + end + end + + function bibtex.authoryearsref(id,list) + local result = collectauthoryears(id,list,method,what) + for author, years in next, result do + texsprint(ctxcatcodes,"(",authors.concat(author,method,what,settings),", ",concat(years,", "),")") + end + end + + function bibtex.singular_or_plural(singular,plural) + if lastconcatsize and lastconcatsize > 1 then + texsprint(ctxcatcodes,plural) + else + texsprint(ctxcatcodes,singular) + end + end end + + +--~ local function test(sample) +--~ local authors = splitauthors(sample) +--~ print(table.serialize(authors)) +--~ for i=1,#authors do +--~ local author = authors[i] +--~ print(normalauthor (author,settings)) +--~ print(normalshortauthor (author,settings)) +--~ print(invertedauthor 
(author,settings)) +--~ print(invertedshortauthor(author,settings)) +--~ end +--~ print(concatauthors(sample,settings,normalauthor)) +--~ print(concatauthors(sample,settings,normalshortauthor)) +--~ print(concatauthors(sample,settings,invertedauthor)) +--~ print(concatauthors(sample,settings,invertedshortauthor)) +--~ end + +--~ local sample_a = "Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der" +--~ local sample_b = "Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut" + +--~ test(sample_a) +--~ test(sample_b) diff --git a/Master/texmf-dist/tex/context/base/bibl-bib.mkiv b/Master/texmf-dist/tex/context/base/bibl-bib.mkiv index 51db67ed7da..10abe5cb8a7 100644 --- a/Master/texmf-dist/tex/context/base/bibl-bib.mkiv +++ b/Master/texmf-dist/tex/context/base/bibl-bib.mkiv @@ -11,19 +11,952 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\writestatus{loading}{ConTeXt Bibliography Support / BibTeX} +\writestatus{loading}{ConTeXt Bibliography Support / Experimental BibTeX} \registerctxluafile{bibl-bib}{1.001} \unprotect -\def\definebibtexsession [#1]{\ctxlua{commands.definebibtexsession("#1")}} -\def\preparebibtexsession [#1]{\ctxlua{commands.preparebibtexsession("#1")}} +% todo: et al limiters +% todo: split: citationvariant and publicationvariant + +%D This interface is under development. As I don't use \BIBTEX\ myself I need +%D some motivation to spend time on it, and an occasional question on the +%D list can be a reason. A few examples. As \BIBTEX\ databases can be poluted +%D by local commands, we need to catch: +%D +%D \startbuffer +%D \defbibtexcommand\MF {MF} +%D \defbibtexcommand\MP {MP} +%D \defbibtexcommand\TUB {TUGboat} +%D \defbibtexcommand\Mc {Mac} +%D \defbibtexcommand\sltt{\tt} +%D \defbibtexcommand\<#1>{\type{#1}} +%D \defbibtexcommand\acro#1{#1} +%D \stopbuffer +%D +%D \typebuffer +%D +%D Let's define a session and load a few databases. We convert to \UTF\ and +%D strip commands. +%D +%D \startbuffer +%D \definebibtexsession [somebibtex] +%D \registerbibtexfile [somebibtex] [tugboat.bib] +%D \registerbibtexfile [somebibtex] [komoedie.bib] +%D \preparebibtexsession [somebibtex] [convert,strip] +%D \stopbuffer +%D +%D \typebuffer +%D +%D This loads an mapping (work in progress): +%D +%D \startbuffer +%D \def\currentbibtexformat{apa} \input bxml-\currentbibtexformat.mkiv +%D \stopbuffer +%D +%D \typebuffer +%D +%D There are several ways to handle the \XML. It helps if you're a bit +%D familiar with \XML\ processing in \MKIV. +%D +%D Here we regular setups. Three elements are mapped but only one +%D is actually used and applied to root element \type {/bibtex}. +%D +%D \startbuffer +%D \startxmlsetups bibtex +%D \xmlregistereddocumentsetups{#1}{} +%D \xmlsetsetup{#1}{bibtex|entry|field}{bibtex:*} +%D \xmlmain{#1} +%D \stopxmlsetups +%D +%D \startxmlsetups bibtex:bibtex +%D \xmlfilter{#1}{ +%D /entry[@category='article'] +%D /field[@name='author' and (find(text(),'Hagen') or find(text(),'Hoekwater'))] +%D /../command(bibtex:format) +%D } +%D \stopxmlsetups +%D +%D \applytobibtexsession[somebibtex][bibtex] +%D \stopbuffer +%D +%D \typebuffer +%D +%D A simpler setup is given next. 
Here we just apply a setup to the root +%D element directly: +%D +%D \startbuffer +%D \startxmlsetups bibtex:list +%D \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)} +%D \stopxmlsetups +%D +%D \applytobibtexsession[somebibtex][bibtex:list] +%D \stopbuffer +%D +%D \typebuffer +%D +%D A slightly more complex expression: +%D +%D \startbuffer +%D \startxmlsetups bibtex:filter +%D \xmlfilter{#1}{ +%D /bibtex +%D /entry[@category='article'] +%D /field[@name='author' and (find(text(),'Hagen') or find(text(),'Hoekwater'))] +%D /../command(bibtex:format) +%D } +%D \stopxmlsetups +%D +%D \applytobibtexsession[somebibtex][bibtex:filter] +%D \stopbuffer +%D +%D \typebuffer + +\newtoks \everydefinebibtexsession +\newtoks \everypreparebibtexsession +\newtoks \everysetupbibtexsession +\setfalse \tracebibtexformat + +\unexpanded\def\definebibtexsession {\dosingleargument\dodefinebibtexsession} +\def\preparebibtexsession {\dodoubleempty \dopreparebibtexsession} +\unexpanded\def\setupbibtexsession {\dodoubleargument\dosetupbibtexsession} + +\def\dodefinebibtexsession [#1]{\edef\currentbibtexsession{#1}% + \ctxlua{commands.definebibtexsession("#1")}% + \the\everydefinebibtexsession} + +\def\dopreparebibtexsession[#1][#2]{\edef\currentbibtexsession{#1}% + \ctxlua{commands.preparebibtexsession("#1","bibtex:#1","#2")}% + \the\everypreparebibtexsession} + +\def\dosetupbibtexsession [#1][#2]{\edef\currentbibtexsession{#1}% + \getparameters[\??pb#1][#2]% + \the\everysetupbibtexsession} + \def\registerbibtexfile {\dodoubleargument\doregisterbibtexfile} -\def\doregisterbibtexfile [#1][#2]{\ctxlua{commands.registerbibtexfile("#1","#2")}} % also the fast one \def\registerbibtexentry {\dodoubleargument\doregisterbibtexentry} -\def\doregisterbibtexentry [#1][#2]{\ctxlua{commands.registerbibtexentry("#1","#2")}} % also the fast one \def\applytobibtexsession {\dodoubleargument\doapplytobibtexsession} + +\def\doregisterbibtexfile [#1][#2]{\ctxlua{commands.registerbibtexfile("#1","#2")}} +\def\doregisterbibtexentry [#1][#2]{\ctxlua{commands.registerbibtexentry("#1","#2")}} \def\doapplytobibtexsession[#1][#2]{\xmlprocessregistered{bibtex:#1}{#2}{#2}} +\unexpanded\def\bibtexcommand#1% + {\ifcsname\??pb:c:#1\endcsname \else + \fakebibtexcommand{#1}% + \fi + \csname\??pb:c:#1\endcsname} + +\def\fakebibtexcommand#1% + {\ifcsname#1\endcsname + \writestatus{bibtex}{unknown command: #1, using built-in context variant}% + \setugvalue{\??pb:c:#1}{\dosomebibtexcommand{#1}}% + \else + \writestatus{bibtex}{unknown command: #1}% + \setugvalue{\??pb:c:#1}{\dofakebibtexcommand{#1}}% + \fi} + +\let\dosomebibtexcommand \getvalue +\def\dofakebibtexcommand#1{{\tttf#1}} + +\def\defbibtexcommand#1% + {\setuvalue{\strippedcsname#1}} + +\def\bibxmldoifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}} +\def\bibxmldoif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}} +\def\bibxmldoifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}} +\def\bibxmlflush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}} +\def\bibxmlsetup {\xmlsetup \currentbibxmlnode} % {#1} + +\def\currentbibtexformat{apa} % ho wto interface this, maybe split loading and key +\def\currentbibxmlnode {unset} +\def\currentbibxmltag {unset} + +\startxmlsetups bibtex + \xmlregistereddocumentsetups{#1}{} + \xmlsetsetup{#1}{bibtex|entry|field}{bibtex:*} + \xmlmain{#1} +\stopxmlsetups + +\startxmlsetups bibtex:format + \bibtexpublicationsparameter\c!before\relax % prevents lookahead + \edef\currentbibxmlnode {#1} + \edef\currentbibxmltag {\xmlatt{#1}{tag}} + 
\edef\currentbibxmlcategory{\xmlatt{#1}{category}} + \ifconditional\tracebibtexformat + \tracedbibxmlintro\currentbibxmltag + \tracedbibxmlintro\currentbibxmlcategory + \fi + \ignorespaces + \xmlcommand{#1}{.}{bibtex:\currentbibtexformat:\currentbibxmlcategory} + \removeunwantedspaces + \bibtexpublicationsparameter\c!after\relax % prevents lookahead +\stopxmlsetups + +\startxmlsetups bibtex:list + \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)} +\stopxmlsetups + +\startxmlsetups bibtex:bibtex + \xmlfilter{#1}{/entry/command(bibtex:format)} +\stopxmlsetups + +% formatters + +\let\normalbibxmlflush\bibxmlflush + +\definecolor[bibtextracecolor:field] [darkred] +\definecolor[bibtextracecolor:crossref][darkblue] +\definecolor[bibtextracecolor:key] [darkgreen] + +\def\tracedbibxmlintro #1{{\tttf#1 -> }} +\def\tracedbibxmlflush #1{\color[bibtextracecolor:field] {\tttf[#1]}} +\def\tracedbibxmltexts #1{\color[bibtextracecolor:field] {\tttf<#1>}} +\def\tracedbibxmlcrossref#1{\color[bibtextracecolor:crossref]{\tttf#1}} +\def\tracedbibxmlkey #1{\color[bibtextracecolor:key] {\tttf#1}} + +\def\tracedbibxmltext + {\ifconditional\tracebibtexformat + \expandafter\tracedbibxmltexts % plural + \else + \expandafter\firstofoneargument + \fi} + +\def\bibxmlflush + {\ifconditional\tracebibtexformat + \expandafter\tracedbibxmlflush + \else + \expandafter\normalbibxmlflush + \fi} + +\startxmlsetups bibtex:format:crossref + \ifconditional\tracebibtexformat + \tracedbibxmlcrossref{\xmlfirst\currentbibxmlnode{/field[@name='crossref']/lower()}} + \else + \cite[\xmlfirst\currentbibxmlnode{/field[@name='crossref']/lower()}] + \fi +\stopxmlsetups + +\startxmlsetups bibtex:format:key + \ifconditional\tracebibtexformat + \tracedbibxmlkey{\normalbibxmlflush{key}} + \else + \bibxmlflush{key} + \fi +\stopxmlsetups + +\startxmlsetups bibtex:format:common:author + \ifconditional\tracebibtexformat + \bibxmlflush\currentbibtexvariant + \else + \xmlfilter{#1}{/field[@name='\currentbibtexvariant']/bibtexconcat('\currentbibtexvariant')} + \fi +\stopxmlsetups + +\startxmlsetups bibtex:format:author + \begingroup + \def\currentbibtexvariant{author} + \xmlsetup{#1}{bibtex:format:common:author} + \endgroup +\stopxmlsetups + +\startxmlsetups bibtex:format:artauthor + \begingroup + \def\currentbibtexvariant{artauthor} + \xmlsetup{#1}{bibtex:format:common:author} + \endgroup +\stopxmlsetups + +\startxmlsetups bibtex:format:editor + \begingroup + \def\currentbibtexvariant{editor} + \xmlsetup{#1}{bibtex:format:common:author} + \endgroup +\stopxmlsetups + +\startxmlsetups bibtex:format:doi +% \bibdoifelse{\@@pb@doi}{#1\expanded{\bibgotoDOI{\@@pb@thekey}{\@@pb@doi}}#2}{#3} + *doi* +\stopxmlsetups + + +\startxmlsetups bibtex:format:doi +% \bibdoifelse{\@@pb@biburl}{#1\expanded{\bibgotoURL{\@@pb@thekey}{\@@pb@biburl}}#2}{#3} + *url* +\stopxmlsetups + +\startxmlsetups bibtex:format:month +% {\bibdoifelse\@@pb@month +% {#1\doifnumberelse\@@pb@month +% {\doifconversiondefinedelse\@@pbmonthconversion +% {\convertnumber\@@pbmonthconversion\@@pb@month}{\@@pb@month}}% +% {\@@pb@month}#2}% +% {#3} + *month* +\stopxmlsetups + +% lists + +\def\bibtexlistprocessor + {\ctxlua{bibtex.hacks.add(structure.lists.uservalue("\currentlist",\currentlistindex,"bibref"),\currentlistindex)}} + +\appendtoks + \definelist[\currentbibtexsession]% + \setuplist[\currentbibtexsession][\c!state=\v!start,\c!width=]% + \installstructurelistprocessor{\currentbibtexsession:userdata}{\bibtexlistprocessor}% +\to \everydefinebibtexsession + +% \def\installbibtexsorter#1#2% +% 
{\setvalue{\??pb:\c!sort:#1}{#2}} + +% \installbibtexsorter\v!no {no} +% \installbibtexsorter\v!author {au} +% \installbibtexsorter\v!title {ti} +% \installbibtexsorter\v!short {ab} +% \installbibtexsorter\empty {no} +% \installbibtexsorter\s!default{no} + +% \setupbibtex +% [\c!sorttype=\v!cite, +% \c!sort=no] + +% \long\unexpanded\def\startpublication#1\stoppublication +% {\blank +% todo +% \blank} + +% \let\stoppublication\relax + +\unexpanded\def\bibtexspace {\removeunwantedspaces\space} +\unexpanded\def\bibtexperiod {\removeunwantedspaces.\space} +\unexpanded\def\bibtexcomma {\removeunwantedspaces,\space} +\unexpanded\def\bibtexlparent {\removeunwantedspaces\space(} +\unexpanded\def\bibtexrparent {\removeunwantedspaces)\space} +\unexpanded\def\bibtexlbracket{\removeunwantedspaces\space[} +\unexpanded\def\bibtexrbracket{\removeunwantedspaces]\space} + +% interfacing + +% todo : lang en language +% todo : directions + + +% variables + +\newcount\bibtexblock \bibtexblock\plusone + +\newtoks \everysetupbibtexpublications +\newtoks \everysetupbibtexcitations +\newcount\bibtexcounter + +\def\bibtexrefprefix{\number\bibtexblock:} + +\let\currentbibtexsession\s!default +\let\currentbibtexvariant\s!default + +% parameters: session+variant variant session shared + +\def\bibtexpublicationsparameter#1% + {\csname + \ifcsname\??pb\currentbibtexsession:\currentbibtexvariant#1\endcsname + \??pb\currentbibtexsession:\currentbibtexvariant#1% + \else\ifcsname\??pb:\currentbibtexvariant#1\endcsname + \??pb:\currentbibtexvariant#1% + \else\ifcsname\??pb\currentbibtexsession#1\endcsname + \??pb\currentbibtexsession#1% + \else\ifcsname\??pb#1\endcsname + \??pb#1% + \else + \s!empty + \fi\fi\fi\fi + \endcsname} + +\def\bibtexcitationparameter#1% + {\csname + \ifcsname\??pv\currentbibtexsession:\currentbibtexvariant#1\endcsname + \??pv\currentbibtexsession:\currentbibtexvariant#1% + \else\ifcsname\??pv:\currentbibtexvariant#1\endcsname + \??pv:\currentbibtexvariant#1% + \else\ifcsname\??pv\currentbibtexsession#1\endcsname + \??pv\currentbibtexsession#1% + \else\ifcsname\??pv#1\endcsname + \??pv#1% + \else + \s!empty + \fi\fi\fi\fi + \endcsname} + +% setup commands + +\unexpanded\def\setupbibtexpublications + {\let\currentpublicationclass\??pb + \let\everysetupbibtexwhatever\everysetupbibtexpublications + \dodoubleargument\dosetupbibtexwhatever} + +\unexpanded\def\setupbibtexcitations + {\let\currentpublicationclass\??pv + \let\everysetupbibtexwhatever\everysetupbibtexcitations + \dodoubleargument\dosetupbibtexwhatever} + +\unexpanded\def\setupbibtexpublicationvariants + {\let\currentpublicationclass\??pb + \let\everysetupbibtexwhatever\everysetupbibtexpublications + \dotripleargument\dosetupbibtexwhatevervariant} + +\unexpanded\def\setupbibtexcitationvariants + {\let\currentpublicationclass\??pv + \let\everysetupbibtexwhatever\everysetupbibtexcitations + \dotripleargument\dosetupbibtexwhatevervariant} + +\def\dosetupbibtexwhatever[#1][#2]% [sessionlist] [setup] + {\ifsecondargument + % sessions setups + \def\dobtxcommand##1{\getparameters[\currentpublicationclass##1][#2]}% + \processcommalist[#1]\dobtxcommand + \else + % setups + \getparameters[\currentpublicationclass][#1]% + \the\everysetupbibtexwhatever + \fi} + +\def\dosetupbibtexwhatevervariant[#1][#2][#3]% [sessionlist] [variantlist] [setup] + {\ifthirdargument + % sessions variants setups + \def\dobtxcommand##1% + {\def\dodobtxcommand####1{\getparameters[\currentpublicationclass##1:####1][#3]}% + \processcommalist[#2]\dodobtxcommand}% + 
\processcommalist[#1]\docbtxommand + \else\ifsecondargument + % variants setups + \def\dobtxcommand##1{\getparameters[\currentpublicationclass:##1][#2]}% + \processcommalist[#1]\dobtxcommand + \else + % setups + \getparameters[\currentpublicationclass][#1]% + \the\everysetupbibtexwhatever + \fi\fi} + +% some initializations + +\setupbibtexcitationvariants + [author,authoryear,authoryears] + [\c!namesep={, }] + +% loading alternatives (apa etc) + +\def\doloadbibtexpublicationalternative + {\ifproductionrun + \edef\bibtexpublicationsalternative{\@@pbalternative}% parent + \ifx\bibtexpublicationsalternative\empty \else + \processcommacommand[\bibtexpublicationsalternative]\dodoloadbibtexpublicationalternative + \let\@@pbalternative\empty + \fi + \fi} + +\def\dodoloadbibtexpublicationalternative#1% + {\doonlyonce{#1} + {\readsysfile{bxml-#1.mkiv} + {\showmessage\m!publications{6}{bxml-#1}} + {\showmessage\m!publications{1}{bxml-#1}}}} + +\appendtoks + \doloadbibtexpublicationalternative +\to \everysetupbibtexpublications + +\appendtoks + \doloadbibtexpublicationalternative +\to \everyjob + +% whatever, should be key + +\def\bibtexleftnumber#1{#1\hfill~} + +% testing + +% \showmessage\m!publications{5}{#1 is unknown}\secondoftwoarguments} + +\let\doifbibreferencefoundelse\secondofthreearguments + +% lists + +\newtoks\everysetupbibtexlistplacement + +% this will change as we need it too often .. we will use context.thebibtexnamesep + +\appendtoks + \ctxlua {bibtex.authors.setsettings { + namesep = \!!bs\bibtexpublicationsparameter\c!namesep\!!es, + lastnamesep = \!!bs\bibtexpublicationsparameter\c!lastnamesep\!!es, + finalnamesep = \!!bs\bibtexpublicationsparameter\c!finalnamesep\!!es, + firstnamesep = \!!bs\bibtexpublicationsparameter\c!firstnamesep\!!es, + juniorsep = \!!bs\bibtexpublicationsparameter\c!juniorsep\!!es, + vonsep = \!!bs\bibtexpublicationsparameter\c!vonsep\!!es, + surnamesep = \!!bs\bibtexpublicationsparameter\c!surnamesep\!!es, + namesep = \!!bs\bibtexpublicationsparameter\c!namesep\!!es, + lastnamesep = \!!bs\bibtexpublicationsparameter\c!lastnamesep\!!es, + finalnamesep = \!!bs\bibtexpublicationsparameter\c!finalnamesep\!!es, + author = { + etallimit = \!!bs\bibtexpublicationsparameter\c!authoretallimit\!!es, + etaldisplay = \!!bs\bibtexpublicationsparameter\c!authoretaldisplay\!!es, + etaltext = \!!bs\bibtexpublicationsparameter\c!authoretaltext\!!es, + }, + editor = { + etallimit = \!!bs\bibtexpublicationsparameter\c!editoretallimit\!!es, + etaldisplay = \!!bs\bibtexpublicationsparameter\c!editoretaldisplay\!!es, + etaltext = \!!bs\bibtexpublicationsparameter\c!editoretaltext\!!es, + }, + artauthor = { + etallimit = \!!bs\bibtexpublicationsparameter\c!artauthoretallimit\!!es, + etaldisplay = \!!bs\bibtexpublicationsparameter\c!artauthoretaldisplay\!!es, + etaltext = \!!bs\bibtexpublicationsparameter\c!artauthoretaltext\!!es, + }, + } }% +\to \everysetupbibtexlistplacement + +\def\completebibtexpublications{\dodoubleempty\docompletebibtexpublications} +\unexpanded\def\placebibtexpublications {\dodoubleempty\doplacebibtexpublications} + +\def\docompletebibtexpublications[#1][#2]% title might become obsolete, just headtext + {\begingroup + \edef\currentbibtexsession{#1}% + \let\currentlist\currentbibtexsession + \setuplist[\currentbibtexsession][\c!criterium=\v!previous,#2] + \doifelsenothing{\namedlistparameter\currentbibtexsession\c!title} + {\systemsuppliedchapter[\currentbibtexsession]{\headtext{\currentbibtexsession}}} + 
{\normalexpanded{\systemsuppliedchapter[\currentbibtexsession]{\namedlistparameter\currentbibtexsession\c!title}}}% + \dodoplacebibtexpublications} + +\def\doplacebibtexpublications[#1][#2]% + {\begingroup + \edef\currentbibtexsession{#1}% + \let\currentlist\currentbibtexsession + \setuplist[\currentbibtexsession][\c!criterium=\v!previous,#2]% + \dodoplacebibtexpublications} + +\def\dodoplacebibtexpublications + {\determinelistcharacteristics[\currentbibtexsession]% + \the\everysetupbibtexlistplacement + \forgetall + \typesetbibtexlist + \endgroup + \global\advance\bibtexblock\plusone} + +\setvalue{\??pb:\c!numbering:\v!short}#1% todo var s -> short tag + {\bibtexlistnumberbox{\bibtexpublicationsparameter\c!numbercommand{\bibtexgetshort\currentpublicationtag}}} + +\setvalue{\??pb:\c!numbering:\v!bib}#1% todo var n -> number + {\bibtexlistnumberbox{\bibtexpublicationsparameter\c!numbercommand{\bibtexgetnumber\currentpublicationtag}}} + +\setvalue{\??pb:\c!numbering:\s!unknown}#1% + {\bibtexlistnumberbox{\bibtexpublicationsparameter\c!numbercommand{#1}}} + +\def\@@pblimitednumber % name + {\csname\??pb:\c!numbering:% + \ifcsname\??pb:\c!numbering:\currentbibtexnumbering\endcsname + \currentbibtexnumbering + \else + \s!unknown + \fi + \endcsname} + +\appendtoks + \edef\currentbibtexnumbering{\bibtexpublicationsparameter\c!numbering}% + \ifx\currentbibtexnumbering\v!no + \setuplist[\currentbibtexsession][\c!numbercommand=,\c!symbol=\v!none,\c!textcommand=\outdented]% + \else + \setuplist[\currentbibtexsession][\c!numbercommand=\@@pblimitednumber]% + \fi +\to \everysetupbibtexlistplacement + +\newdimen\bibtexnumberwidth + +\def\bibtexlistnumberbox{\hbox \ifcase\bibtexnumberwidth\else to \bibtexnumberwidth\fi} + +\appendtoks + \doifelse{\bibtexpublicationsparameter\c!autohang}\v!yes + {\ifx\currentbibtexnumbering\v!short + \setbox\scratchbox\hbox{\bibtexpublicationsparameter\c!numbercommand{\bibtexpublicationsparameter\c!samplesize}}% + \else + \setbox\scratchbox\hbox{\bibtexpublicationsparameter\c!numbercommand{\ctxlua{tex.write(structure.lists.size())}}}% + \fi + \bibtexnumberwidth\wd\scratchbox + \setuplist[\currentbibtexsession][\c!distance=\zeropoint]} + {\doifelsenothing{\bibtexpublicationsparameter\c!width} + {\bibtexnumberwidth\zeropoint} + {\bibtexnumberwidth\bibtexpublicationsparameter\c!width}}% + \setuplist[\currentbibtexsession][\c!width=\bibtexnumberwidth]% +\to \everysetupbibtexlistplacement + +\appendtoks + \let\maybeyear\gobbleoneargument + \let\noopsort \gobbleoneargument +\to \everysetupbibtexlistplacement + +\appendtoks + \doifelse{\bibtexpublicationsparameter\c!maybeyear}\v!off + {\let\maybeyear\gobbleoneargument} + {\let\maybeyear\firstofoneargument}% +\to \everysetupbibtexlistplacement + +\appendtoks + \doifnot{\bibtexpublicationsparameter\c!option}\v!continue + {\global\bibtexcounter\zerocount}% +\to \everysetupbibtexlistplacement + +\appendtoks + \edef\currentbibtexcriterium{\namedlistparameter\currentbibtexsession\c!criterium}% +\to \everysetupbibtexlistplacement + +\def\typesetbibtexlist + {\dobeginoflist + \doif{\namedlistparameter\currentbibtexsession\c!criterium}\v!cite + {\setuplist[\currentbibtexsession][\c!criterium=\v!here]}% + \doifelse{\bibtexpublicationsparameter\c!method}\v!local + {\ctxlua{bibtex.hacks.reset(1)}}% function can take method + {\ctxlua{bibtex.hacks.reset(2)}}% + \placestructurelist + {\currentbibtexsession} + {\currentbibtexcriterium} + {\namedlistparameter\currentbibtexsession\c!number}% + 
\ctxlua{bibtex.hacks.flush("\bibtexpublicationsparameter\c!sorttype")}% + \doendoflist} + +\unexpanded\def\typesetbibtexpublication#1% + {\edef\currentbibtexsessiontag{#1}% + \ifx\currentbibtexsessiontag\empty + % can't really happen + \else\ifx\currentbibtexcriterium\v!all + \doplacepublicationindeed + \else + \ctxlua{bibtex.hacks.doifalreadyplaced("\currentbibtexsessiontag")} + \donothing + \dotypesetbibtexpublication + \fi\fi} + +\def\dotypesetbibtexpublication + {\doifbibreferencefoundelse\currentbibtexsessiontag + {\global\advance\bibtexcounter\plusone + \ctxlua{bibtex.hacks.registerplaced("\currentbibtexsessiontag")}% + \dodolistelement{\currentbibtexsession}{}{\number\bibtexcounter}{\thebibtexpublicationlistelement}{}{}} + {}} % invalid + +\def\thebibtexpublicationlistelement + {\strut + \expanded{\reference[\bibtexrefprefix\currentbibtexsessiontag]{\number\bibtexcounter}}% + \dotypesetabibtexpublication\currentbibtexsessiontag + \strut} + +\def\dotypesetabibtexpublication#1% + {\begingroup + \ignorespaces + \xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/command(bibtex:format)}% + \removeunwantedspaces +% \ignorespaces +% \bibalternative{\bibgetvart{#1}}% +% \removeunwantedspaces + \endgroup} + +\def\doprocessbibtexentry#1{\typesetbibtexpublication{#1}} + +% citations + +\unexpanded\def\bibtexcitation[#1]% + {\edef\currentbibtexsession{#1}% + \strictdoifnextoptionalelse\dobibtexcitation\dobibtexref} + +\def\dobibtexref#1% + {\dodobibtexcitation[#1][]} + +\def\dobibtexcitation[#1]% + {\strictdoifnextoptionalelse{\dodobibtexcitation[#1]}{\dodobibtexcitation[#1][]}} + +\def\dodobibtexcitation[#1][#2]% + {\dontleavehmode + \begingroup + \doifelsenothing{#2}\secondargumentfalse\secondargumenttrue + \ifsecondargument + \dowhateverbibtexcitation{#1}{#2}% + \else + \donumberedbibtexcitation{#1}% + \fi + \endgroup} + +\def\dowhatevercitation#1#2% + {\processcommalist[#2]\dobibtexcitationindeed + \setupinteraction[\c!style=]% use flag instead + \doifassignmentelse{#1} + {\getparameters[\??pb\??pb][\c!alternative=,\c!extras=,#1]% + \edef\currentbibtexvariant{\@@pb@@pbalternative}% + \ifx\currentbibtexvariant\empty + \edef\currentbibtexvariant{\bibtexpublicationparameter\c!refcommand}% + \fi + \ifx\@@pb@@pbextras\empty + \setupcite[\currentbibtexvariant][#1]% + \else + \edef\@@pb@@pbextras{{\@@pb@@pbextras\ifdefined\@@pb@@pbright\@@pb@@pbright\else\bibtexpublicationparameter\c!right\fi}}% + \expanded{\setupcite[\currentbibtexvariant][#1,\c!right=\@@pb@@pbextras]}% + \fi}% + {\def\currentbibtexvariant{#1}}% + \getvalue{bibtex\currentbibtexvariant ref}[#2]} + +\def\donumberedbibtexcitation#1% + {\processcommalist[#1]\dobibtexcitationindeed + \setupinteraction[\c!style=]% + \edef\currentbibtexvariant{\bibtexcitationparameter\c!refcommand}% + \getvalue{bibtex\currentbibtexvariant ref}[#1]} + +\def\dobibtexcitationindeed#1% + {\iftrialtypesetting \else + \expanded{\writedatatolist[\currentbibtexsession][bibref=#1]}% + \fi} + +\def\nobibtexcitation[#1]% + {\processcommalist[#1]\dobibtexcitationindeed} + +\def\bibtexnumref[#1]% + {\dontleavehmode + \begingroup + \bibtexcitationparameter\v!left + \penalty\!!tenthousand + \ctxlua{bibtex.hacks.resolve("","\number\bibtexblock","#1")}% + \bibtexcitationparameter\v!right + \endgroup} + +\def\dowithbibtexnumrefconnector#1#2% + {\ifnum#1>\plusone + \ifnum#2>\plusone + \ifnum#2=#1\relax + \bibtexpublicationsparameter\c!lastpubsep + \else + \bibtexpublicationsparameter\c!pubsep + \fi + \fi + \fi} + +\def\dowithbibtexnumref#1#2#3#4#5% n, i, 
prefix block ref + {\dowithbibtexnumrefconnector{#1}{#2}% + \def\bibtexrefprefix{#4:}% + \inbiblink[#5]} + +\def\dowithbibtexnumrefrange#1#2#3#4#5#6#7% n, i, prefix block ref + {\dowithbibtexnumrefconnector{#1}{#2}% + \def\bibtexrefprefix{#4:}% + \inbiblink[#5]% + \endash + \def\bibtexrefprefix{#6:}% + \inbiblink[#7]} + +\def\nobibtexnumref#1% + {[#1]} + +% hm + +% \def\@@pbinumbercommand{\executeifdefined{\??pb:\c!numbercommand:\@@pbnumbering}\firstofoneargument} + +% \letvalue{\??pb:\c!numbercommand:\v!yes }\firstofoneargument +% \letvalue{\??pb:\c!numbercommand:\v!no }\gobbleoneargument +% \setvalue{\??pb:\c!numbercommand:\v!short}{\bibtexgetshort\currentpublicationtag\gobbleoneargument} +% \setvalue{\??pb:\c!numbercommand:\v!bib }{\bibtexgetnumber\currentpublicationtag\gobbleoneargument} + +% \def\bibalternative#1{\csname\??pv\@@currentalternative#1\endcsname} + +% basic setup + +% parent -> publicationlist +% +% \setuplist +% [\currentbibtexsession] +% [\c!samplesize={AA99}, +% \c!alternative=a, +% \c!interaction=, +% \c!pagenumber=\v!no, +% #1, +% \c!command=] + +% \setuppublicationlist +% [\c!title=, +% \c!command=\dospecialbibinsert, +% \c!maybeyear=\v!on] + +\setupbibtexpublications + [\c!monthconversion=, + \c!alternative=apa, + \c!method=\v!global, + \c!refcommand=num, + \c!numbercommand=\bibtexleftnumber] + +\setupbibtexcitations % command ? + [\c!refcommand=num] + +% helpers + +\def\doifbibtexinteractionelse + {\iflocation + \edef\temp{\bibtexcitationparameter\c!interaction}% + \ifx\temp\v!stop + \@EA@EA@EA\secondoftwoarguments + \else + \@EA@EA@EA\firstoftwoarguments + \fi + \else + \@EA\secondoftwoarguments + \fi} + +% variants + +% todo: lastsep here + +\newconditional\firstbibtexrefsep + +\def\bibtexresetrefsep + {\settrue\firstbibtexrefsep} + +\def\bibtexinsertrefsep + {\ifconditional\firstbibtexrefsep + \setfalse\firstbibtexrefsep + \else + \bibtexcitationparameter\c!pubsep + \fi} + +\def\inbibtexlink#1#2% + {\doifreferencefoundelse{\bibtexrefprefix#1} + {\goto{#2}[\bibtexrefprefix#1]} + {!#1!\unknownreference{#1}}} + +\def\dobibtexgotolink#1#2% + {\doifreferencefoundelse{\bibtexrefprefix#1} + {\goto{#2}[\bibtexrefprefix#1]} + {!#1!\unknownreference{#1}}} + +\def\dobibattexlink#1#2% + {\doifreferencefoundelse{\bibtexrefprefix#1} + {\at{#2}[\bibtexrefprefix#1]} + {!#1!\unknownreference{#1}}} + +\def\dobibtexurllink#1#2% + {\expanded{\useURL[bibtex:url:#1][#2]}% + \doifbibtexinteractionelse + {\goto{\url[bibtex:url:#1]}[url(bibtex:url:#1)]} + {\url[bibtex:url:#1]}} + +% todo: style, color + +\unexpanded\def\bibtexdataref {\dodoubleargument\dobibtexdataref} +\unexpanded\def\bibtextyperef {\dodoubleargument\dobibtextyperef} +\unexpanded\def\bibtexkeyref {\dodoubleargument\dobibtexkeyref} +\unexpanded\def\bibtexserialref {\dodoubleargument\dobibtexserialref} +\unexpanded\def\bibtexurlref {\dodoubleargument\dobibtexurlref} +\unexpanded\def\bibtexdoiref {\dodoubleargument\dobibtexdoiref} +\unexpanded\def\bibtexpageref {\dodoubleargument\dobibtexpageref} +\unexpanded\def\bibtexnoneref {\dodoubleargument\dobibtexnoneref} +\unexpanded\def\bibtexshortref {\dodoubleargument\dobibtexshortref} +\unexpanded\def\bibtexyearref {\dodoubleargument\dobibtexyearref} +\unexpanded\def\bibtexauthorref {\dodoubleargument\dobibtexauthorref} +\unexpanded\def\bibtexauthoryearref {\dodoubleargument\dobibtexauthoryearref} +\unexpanded\def\bibtexauthoryearsref{\dodoubleargument\dobibtexauthoryearsref} + +\def\dobibtexdataref {\doprocessbibtexref\dodobibtexdataref {ref}} % [#1][#2] 
+\def\dobibtextyperef {\doprocessbibtexref\dodobibtextyperef {type}} % [#1][#2] +\def\dobibtexkeyref {\doprocessbibtexref\dodobibtexkeyref {key}} % [#1][#2] +\def\dobibtexserialref {\doprocessbibtexref\dodobibtexserialref {serial}} % [#1][#2] +\def\dobibtexurlref {\doprocessbibtexref\dodobibtexurlref {url}} % [#1][#2] +\def\dobibtexdoiref {\doprocessbibtexref\dodobibtexdoiref {doi}} % [#1][#2] +\def\dobibtexpageref {\doprocessbibtexref\dodobibtexpageref {page}} % [#1][#2] +\def\dobibtexnoneref {\doprocessbibtexref\dodobibtexnoneref {none}} % [#1][#2] +\def\dobibtexshortref {\doprocessbibtexref\dodobibtexshortref {short}} % [#1][#2] +\def\dobibtexyearref {\doprocessbibtexref\dodobibtexyearref {year}} % [#1][#2] +\def\dobibtexauthorref {\doprocessbibtexref\dodobibtexauthorref {author}} % [#1][#2] +\def\dobibtexauthoryearref {\doprocessbibtexref\dodobibtexauthoryearref {authoryear}} % [#1][#2] +\def\dobibtexauthoryearsref{\doprocessbibtexref\dodobibtexauthoryearsref{authoryears}} % [#1][#2] + +\def\doprocessbibtexref#1#2[#3][#4]% + {\edef\currentbibtexsession{#3}% + \edef\currentbibtexvariant{#2}% + \def\dodoprocessbibtexref##1% + {% test for existence + \edef\currentbibtextag{##1}% + \bibtexinsertrefsep + #1{##1}}% + \bibtexresetrefsep + \bibtexcitationparameter\v!left + \processcommalist[#4]\dodoprocessbibtexref\relax + \bibtexcitationparameter\v!right} + +\def\dodobibtexdataref#1% + {\dotypesetabibtexpublication{#1}} + +\def\dodobibtextyperef#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/attribute('category')}}% + \bibtexrefcontent} + +\def\dodobibtexkeyref#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='key']/context()}}% + \dobibtexgotolink{#1}{\bibtexrefcontent}} + +\def\dodobibtexserialref#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/match()}}% + \dobibtexgotolink{#1}{\bibtexrefcontent}} + +\def\dodobibtexurlref#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/field[@name='url']/context()}}% + \dobibtexurllink{#1}{\bibtexrefcontent}} + +\def\dodobibtexdoiref#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/field[@name='doi']/context()}}% + \dobibtexurllink{#1}{http://dx.doi.org/\bibtexrefcontent}} + +\def\dodobibtexpageref#1% + {\dobibtexatlink{#1}{}} % second argument can become 'page' + +\def\dodobibtexnoneref#1% + {} + +\def\dodobibtexshortref#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/bibtexshort()}}% + \dobibtexgotolink{#1}{\bibtexrefcontent}} + +\def\dodobibtexyearref#1% + {\edef\bibtexrefcontent{\xmlfilter{bibtex:\currentbibtexsession}{/bibtex/entry[@tag='#1']/field[@name='year']/context()}}% + \bibtexrefcontent} + +% \def\bibmaybeinteractive#1#2% +% {\doifelsevalue{@@pv\@@currentalternative\c!compress} +% {\ifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}} +% {#2}} + +% \def\bibauthornumref[#1]% +% {\getcommalistsize[#1]% +% \global\bibitemcounter\commalistsize +% \bibresetrefsep +% \processcommalist[#1]\dobibauthornumref } +% +% \def\dobibauthornumref#1% +% {\bibinsertrefsep +% \doifbibreferencefoundelse{#1} +% {\begingroup +% \bibgetvara{#1}% +% \bibalternative\c!inbetween +% \setuppublications[\c!refcommand=num]% +% \cite[#1]% +% \endgroup} +% {\unknownreference{#1}}} + +% compress years +% andtext namesep +% otherstext authoretallimit + +% we will use context.* instead at the lua end because it 
saves us passing settings + +% \def\thebibtexpubsep {\bibtexpublicationsparameter\c!pubsep} +% \def\thebibtexlastpubsep {\bibtexpublicationsparameter\c!lastpubsep} +% \def\thebibtexfinalpubseparator{\bibtexpublicationsparameter\c!lastpubsep} + +\def\dodobibtexauthorref #1{\ctxlua{bibtex.authorref ("bibtex:\currentbibtexsession","#1","normal","author")}} +\def\dodobibtexauthoryearref #1{\ctxlua{bibtex.authoryearref ("bibtex:\currentbibtexsession","#1","normal","author")}} +\def\dodobibtexauthoryearsref#1{\ctxlua{bibtex.authoryearsref("bibtex:\currentbibtexsession","#1","normal","author")}} + +\unexpanded\def\bibtexsingularplural#1#2{\ctxlua{bibtex.singular_or_plural(\!!bs#1\!!es,\!!bs#2\!!es)}} + \protect \endinput + diff --git a/Master/texmf-dist/tex/context/base/bibl-tra.lua b/Master/texmf-dist/tex/context/base/bibl-tra.lua new file mode 100644 index 00000000000..44223102859 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/bibl-tra.lua @@ -0,0 +1,194 @@ +if not modules then modules = { } end modules ['bibl-bib'] = { + version = 1.001, + comment = "this module is the basis for the lxml-* ones", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +bibtex = bibtex or { } +bibtex.hacks = bibtex.hacks or { } + +local match, gmatch, format, concat, sort = string.match, string.gmatch, string.format, table.concat, table.sort +local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes +local variables, constants = interfaces.variables, interfaces.constants + +local trace_bibtex = false trackers.register("publications.bibtex", function(v) trace_bibtex = v end) + +local hacks = bibtex.hacks + +local list, done, alldone, used, registered, ordered = { }, { }, { }, { }, { }, { } +local mode = 0 + +local template = string.striplong([[ + \citation{*} + \bibstyle{cont-%s} + \bibdata{%s} +]]) + +function hacks.process(settings) + local style = settings.style or "" + local database = settings.database or "" + local jobname = tex.jobname + if database ~= "" then + interfaces.showmessage("publications",3) + io.savedata(file.addsuffix(jobname,"aux"),format(template,style,database)) + if trace_bibtex then + logs.report("publications","processing bibtex file '%s'",jobname) + end + os.execute(format("bibtex %s",jobname)) + -- purge 'm + end +end + +function hacks.register(str) + if trace_bibtex then + logs.report("publications","registering bibtex entry '%s'",str) + end + registered[#registered+1] = str + ordered[str] = #registered +end + +function hacks.reset(m) + mode, list, done = m, { }, { } +end + +function hacks.add(str,listindex) + if not str or mode == 0 then + -- skip + elseif mode == 1 then + -- all locals but no duplicates + local sc = structure.sections.currentid() + if done[str] ~= sc then + done[str], alldone[str] = sc, true + list[#list+1] = { str, listindex } + end + elseif mode == 2 then + -- all locals but no preceding + local sc = structure.sections.currentid() + if not alldone[str] and done[str] ~= sc then + done[str], alldone[str] = sc, true + list[#list+1] = { str, listindex } + end + end +end + +local function compare(a,b) + local aa, bb = a[1], b[1] + if aa and bb then + return ordered[aa] < ordered[bb] + else + return true + end +end + +function hacks.flush(sortvariant) + if sortvariant == "" or sortvariant == variables.cite or sortvariant == "default" then + -- order is cite order i.e. 
same as list + else + sort(list,compare) + end + for i=1,#list do + context.doprocessbibtexentry(list[i][1]) + end +end + +function hacks.registerplaced(str) + used[str] = true +end + +function hacks.doifalreadyplaced(str) + commands.testcase(used[str]) +end + +-- we ask for :tag but when we can't find it we go back +-- to look for previous definitions, and when not found again +-- we look forward + +local function compare(a,b) + return a[3] < b[3] +end + +function hacks.resolve(prefix,block,reference) -- maybe already feed it split + local subset = jobreferences.collected[prefix or ""] or jobreferences.collected[""] + if subset then + local result, done = { }, { } + block = tonumber(block) + for rest in gmatch(reference,"([^,%s]+)") do + local blk, tag, found = block, nil, nil + if block then + tag = blk .. ":" .. rest + found = subset[tag] + if not found then + for i=block-1,1,-1 do + tag = i .. ":" .. rest + found = subset[tag] + if found then + blk = i + break + end + end + end + end + if not found then + blk = "*" + tag = blk .. ":" .. rest + found = subset[tag] + end + if found then + local current = found.entries and found.entries.text + if current and not done[current] then + result[#result+1] = { blk, rest, current } + done[current] = true + end + end + end + -- todo: ranges so the interface will change + sort(result,compare) + local first, last, firsti, lasti, firstr, lastr + local collected = { } + for i=1,#result do + local r = result[i] + local current = r[3] + if not first then + first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r + elseif current == last + 1 then + last, lasti, lastr = current, i, r + else + if last > first + 1 then + collected[#collected+1] = { firstr[1], firstr[2], lastr[1], lastr[2] } + else + collected[#collected+1] = { firstr[1], firstr[2] } + if last > first then + collected[#collected+1] = { lastr[1], lastr[2] } + end + end + first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r + end + end + if first then + if last > first + 1 then + collected[#collected+1] = { firstr[1], firstr[2], lastr[1], lastr[2] } + else + collected[#collected+1] = { firstr[1], firstr[2] } + if last > first then + collected[#collected+1] = { lastr[1], lastr[2] } + end + end + end + if #collected > 0 then + for i=1,#collected do + local c = collected[i] + if c[3] then + context.dowithbibtexnumrefrange(#collected,i,prefix,c[1],c[2],c[3],c[4]) + else + context.dowithbibtexnumref(#collected,i,prefix,c[1],c[2]) + end + end + else + context.nobibtexnumref("error 1") + end + else + context.nobibtexnumref("error 2") + end +end diff --git a/Master/texmf-dist/tex/context/base/bibl-tra.mkii b/Master/texmf-dist/tex/context/base/bibl-tra.mkii new file mode 100644 index 00000000000..087781db9b1 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/bibl-tra.mkii @@ -0,0 +1,1778 @@ +%D \module +%D [ file=bibl-tra, +%D version=2009.08.13, +%D title=\CONTEXT\ Publication Module, +%D subtitle=Publications, +%D author=Taco Hoekwater, +%D date=\currentdate, +%D copyright=Public Domain] +%C +%C Donated to the public domain. + +%D This used to be module \type {t-bib} but due to the number of differences +%D in handling structure between \MKII\ and \MKIV\ we now have \BIBTEX\ support +%D in the kernel. The only patches concerns some namespace issues. Also, +%D constants and variables are now predefined. When the \MKIV\ code is well +%D tested I might backport a couple of adaptions to this \MKII\ variant. 
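The run collapsing at the end of bibtex.hacks.resolve above can be read on its own: three or more consecutive list numbers are folded into a first/last range, while runs of one or two entries are kept as single references. Below is a minimal standalone Lua sketch of just that step; the function name collapse and the sample data are illustrative and not part of the module.

local function collapse(result)
    -- result holds ordered { block, tag, number } triplets, as in hacks.resolve
    local collected = { }
    local firstr, lastr, last
    local function flush()
        if not firstr then
            -- nothing pending yet
        elseif lastr[3] > firstr[3] + 1 then
            -- three or more consecutive numbers: emit one first/last range
            collected[#collected+1] = { firstr[1], firstr[2], lastr[1], lastr[2] }
        else
            -- one or two entries: emit them as single references
            collected[#collected+1] = { firstr[1], firstr[2] }
            if lastr[3] > firstr[3] then
                collected[#collected+1] = { lastr[1], lastr[2] }
            end
        end
    end
    for i=1,#result do
        local r = result[i]
        if firstr and r[3] == last + 1 then
            lastr, last = r, r[3]          -- extend the current run
        else
            flush()                        -- close the previous run
            firstr, lastr, last = r, r, r[3]
        end
    end
    flush()
    return collected
end

-- numbers 1,2,3 collapse into a range, 5 stays single:
-- collapse { {1,"alpha",1}, {1,"beta",2}, {1,"gamma",3}, {2,"delta",5} }
-- --> { {1,"alpha",1,"gamma"}, {2,"delta"} }

The module itself also resolves block prefixes and removes duplicates before it gets to this point; the sketch only shows the folding that feeds dowithbibtexnumref and dowithbibtexnumrefrange.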
+ +\writestatus{loading}{ConTeXt Bibliography Support / BibTeX} + +\definefilesynonym[bib][obsolete] + +% here starts t-bib.tex + +%D The original was developed independently by Taco Hoekwater while still working for Kluwer +%D Academic publishers (it still used the Dutch interface then). Development continued after +%D he left Kluwer, and in January 2005, the then already internationalized file was merged +%D with the core distribution by Hans Hagen. The current version is once again by Taco. +%D +%D More documentation and additional resources can be found on the contextgarden: +%D \hyphenatedurl{http://wiki.contextgarden.net//Bibliography}. + +%D \subject{DONE (dd/mm/yyyy)} +%D +%D \startitemize +%D \item add author definition (and associated system variable) (26/05/2005) +%D \item add finalnamesep support for Oxford comma (17/09/2005) +%D \item add \type{\insert...} for: doi, eprint, howpublished (19/09/2005) +%D \item allow a defaulted \type{\setupcite} (19/11/2005) +%D \item renamed citation type 'number' to 'serial' (19/11/2005) +%D \item better definition of \type{\inverted...author} (19/11/2005) +%D \item don't reset [numbercommand] in \type {\setuppublication} by default (20/11/2005) +%D \item don't disable other \type {\setuppublication} keys if alternative is present (20/11/2005) +%D \item drop \type{\sanitizeaccents} (20/11/2005) +%D \item added \type{\nocite} and \type{\cite[none]} (21/11/2005) +%D \item added headtext for it (23/11/2005) +%D \item make \type{\cite[url]} and \type{\cite[doi]} interactive (23/11/2005) +%D \item make right-aligned labels in the list work even when autohang=no +%D \item use 'et al.' instead of 'et.al.'. Pointed out by Peter M\"unster (30/12/2005) +%D \item added headtext for cz (31/12/2005) +%D \item Keep whitespace after \type{\cite} with single argument (31/12/2005) +%D \item Fix broken \type{\cite{}} support (31/12/2005) +%D \item Use \type{\readfile} inside \type{\usepublications} instead of \type{\readsysfile} (12/01/2006) +%D \item Use \type{\currentbibyear} and \type{\currentbibauthor} instead of \type{\YR} and \type{\AU} (05/02/2006) +%D \item Fix compressed version of authoryear style (05/02/2006) +%D \item Rename the clashing data fields \type{\url} and \type{\type} to \type{\biburl} and \type{\bibtype} (05/02/2006) +%D \item Added two French bibl files from Renaud Aubin (06/02/2006) +%D \item Five new bib classes and eight extra bib fields, for IEEEtran (07/02/2006) +%D \item French keyword translation, provided by Renaud (08/02/2006) +%D \item fix underscores in undefined keys (22/02/2006) +%D \item Destroy interactivity in labels of the publication list (13/03/2006) +%D \item fix multi-cite list compression (11/4/2006) +%D \item fix \type{\getcitedata} (11/4/2006) +%D \item magic for chapter bibs (18-25/4/2006) +%D \item language setting (25/4/2006) +%D \item use \type{\hyphenatedurl} for \type{\inserturl} (25/4/2006) +%D \item Add \type{\docitation} to \type{\nocite} (26/4/2006) +%D \item patents can have numbers, added to bst files (26/4/2006) +%D \item \type{\docitation} needs a \type{\iftrialtypesetting} (27/4/2006) +%D \item \type{\filllocalpublist}'s loop is bound by definedness, not resolvedness (27/4/2006) +%D \item \type{\setuppublications[monthconversion=]} added (15/5/2006) +%D \item use \type{\undefinedreference} instead of bare question marks (15/5/2006) +%D \item add grouping around \type{\placepublications} commands (16/5/2006) +%D \item fix a bug in \type{\cite{}} (17/5/2006) +%D \item support \type{\cite[authornum]}
(18/5/2006) +%D \item make \type{\cite} unexpandable (20/6/2006) +%D \item allow hyperlinks in author\&year combos +%D (cite list compression has to be off) (20/6/2006) +%D \item fix duplicate labels for per-chapter style (20/6/2006) +%D \item allow \type{\setupcite[interaction=(start|stop)]} +%D \item fix the item number in the publication list with 'numbering=yes' (22/6/2006) +%D \item make the default criterium for \type{\placepublications} be \type{previous} (23/6/2006) +%D \item fix \type{\normalauthor} and \type{\normalshortauthor} spacing (29/6/2006) +%D \item do not typeset empty arguments to \type{\typesetapublication} (29/6/2006) +%D \item add \type{symbol=none} to \type{\setuplist} in unnumbered +%D mode to prevent typesetting of bare numbers (29/6/2006) +%D \item remove two incorrect spaces from bibl-num.tex (1/7/2006) +%D \item reset font styles within \type{\cite}, so that font switches +%D in \type{left} stay in effect (12/7/2006) +%D \item guard added against loading bbl files multiple times (13/7/2006) +%D \item fix \type{\cite[num]} when compression is on. (14/7/2006) +%D \item test \type{\iflocation} before deciding to use the +%D interactive version of cite (18/7/2006) +%D \item support \type{\setupcite[authoretallimit=1]} (18/7/2006) +%D \item support use of \type{\cite} within titles and captions by +%D safeguarding the list item extraction and reference placement +%D code (19/7/2006) +%D \item support \type{\setuppublicationlist[title=\chapter]} (4/8/2006) +%D \item use the expansion of \type{\headtext{pubs}} (4/8/2006) +%D \item hook added for repeated authors in publication list +%D \type{\setuppublicationlist[artauthorcommand=\mythreeargscommand]} +%D (4/8/2006) +%D \item make the bracketed arguments of \type{\artauthor}, \type{\author} +%D and \type{\editor} (bbl commands) optional (4/8/2006) +%D \item the constants \type{sorttype}, \type{compress} and +%D \type{autohang} have moved to the core (8/8/2006) +%D \item bibtex is now registered as a program to be run by texexec (8/8/2006) +%D \item fix a bug in \type{\setupcite[authoretallimit=1]} (9/8/2006) +%D \item fix a bug inside citations that prevented lastpubsep from ever being +%D used due to a volatile \type{\commalistsize} (25/8/2006). +%D \item added the possibility of \type{\placepublications[option=continue]} +%D (6/9/2006) +%D \item Mojca translated Master's Thesis to Masterarbeit (bibl-apa-de.tex) +%D (12/9/2006) +%D \item Added \type{\setuppublicationlist[maybeyear=off]} by request from +%D Thomas Schmitz (15/9/2006) +%D \item Removed some spurious spaces pointed out by Willi Egger (19/9/2006) +%D \item Add configuration of bibtex executable name (4/11/2006) +%D \item Fix numbering=short and numbering=bib (spotted by Matthias W\"achter) (4/11/2006) +%D \item third attempt to get a correct release (5/11/2006) +%D \item fix a few missing dots in bibl-num.tex (7/12/2006) +%D \item Patch for DOIs by Tobias Burnus (17/4/2007) +%D \item Patch for \type{\insertbiburl} and \type{\insertdoi} for Tobias Burnus (18/4/2007) +%D \item Added a missing \type{\relax} in \type{\dospecialbibinsert}, +%D that made the space before the {\it et al.} text disappear. (18/4/2007) +%D \item Attempt to fix percent signs in bbl files. As a side-effect, +%D this prohibits comments in \tex{startpublication} blocks!
(17/4/2008) +%D \item Patch from Matthias W\"achter that allows arbitrary .bst +%D files to be used with \tex{setupbibtex} (25/9/2008) +%D \item Extended for the new multilingual setups for the Oct 2008 current of ConTeXt (23/10/2008) +%D \item Multilingual setups needed another fix (27/10/2008) +%D \item Two fixes for bibl-apa by Michael Green (27/10/2008) +%D \item Catalan translation of 'References' (10/11/2008) +%D \item 'chapter' -> 'chapitre' in bibl-apa-fr (27/11/2008) +%D \item Run bibtex via os.execute in mkiv modee (01/12/2008) +%D \item Small correction in bibl-apa's placement of volume +%D information in articles (05/01/2009) +%D \item Handle multi-author (more than two) cases in \type{\cite} +%D (02/03/2009) +%D \item Suppress a syntax error in \type{cont-xp} mode. The output is +%D probably not right, though (02/03/2009) +%D \item Added a \tex{loadmarkfile} at the end, and two new files +%D from Hans. The \type{t-bib.mkiv} is needed to make the module +%D work with the new structure code (17/04/2009) +%D \item Added a patch to \type{t-bib.mkiv} from Hans to make the +%D cross referencing between multiple citations an +%D bibliographies work (27/04/2009) +%D \item Remove a superfluous \type{\unprotect} in t-bib.mkiv (11/05/2009). +%D \item Patch of incollection in bibl-ams.tex from Xan (08/06/2009). +%D \item Patch of unpublished in bibl-ams.tex from Xan (22/07/2009). +%D \item Modified \type{\bibdogetupsometextprefix} so it works for undefined +%D language labels, from Hans (13/08/2009). +%D \item Removed some \MKIV\ hacks as well as some things that are in the +%D core like variables, constants and messages (HH:22/08/2009). +%D \item Added \type{bib} in front of \type {insert} macros and initialize +%D then later on (HH:22/08/2009). +%D \item Removed test for type {\currentlocationreference} plus associated +%D code (HH:22/08/2009). +%D \stopitemize +%D +%D \subject{WISHLIST} +%D +%D \startitemize +%D \item link back from publication list to citation +%D \item export \type {\citation{}} +%D \item support mlbibtex +%D \item don't load the whole lot, but filter entries instead +%D \stopitemize + +\unprotect + +%D Variables, constants and messages are removed as they are now in the +%D multilingual interface modules. + +\def\biblistname{pubs} % for compatibility + +%D how to load the references. There is some new stuff here +%D to support Idris' (incorrect :-)) use of projects + +% \let\preloadbiblist\relax +% +% \ifx\currentcomponent\v!text +% % single file +% \edef\temp{\the\everystarttext}% +% \ifx\temp\empty +% % post-starttext +% \def\preloadbiblist{\dousepublications\jobname }% +% \else +% % pre-starttext +% \appendtoks \dousepublications\jobname \to \everystarttext +% \fi +% % +% \else \ifx\currentcomponent\v!project +% % a project file, have to set up the partial products! +% \def\startproduct #1 % +% {\doateverystarttext +% \dousepublications{#1}% +% \donextlevel\v!product\currentproduct +% \doexecutefileonce\doexecutefileonce +% \donotexecutefile\doexecutefile#1\\}% +% % +% \else \ifx\currentcomponent\v!product +% % a product file +% \def\preloadbiblist{\dousepublications\jobname }% +% % +% \else +% % a component? 
not sure what to do +% \def\preloadbiblist{\dousepublications\jobname }% +% % +% \fi \fi \fi + +\def\preloadbiblist + {\globallet\preloadbiblist\relax + \dousepublications\jobname} + +\definelist[pubs] +\setuplist[pubs][\c!width=] + +%D \macros{bibdoif,bibdoifnot,bibdoifelse} +%D +%D Here are a few small helpers that are used a lot +%D in all the typesetting commands +%D (\type{\insert...}) we will encounter later. + +\long\def\bibdoifelse#1% + {\@EA\def\@EA\!!stringa\@EA{#1}% + \ifx\!!stringa\empty + \expandafter\secondoftwoarguments + \else + \expandafter\firstoftwoarguments + \fi} + +\long\def\bibdoifnot#1% + {\@EA\def\@EA\!!stringa\@EA{#1}% + \ifx\!!stringa\empty + \expandafter\firstofoneargument + \else + \expandafter\gobbleoneargument + \fi} + +\long\def\bibdoif#1% + {\@EA\def\@EA\!!stringa\@EA{#1}% + \ifx\!!stringa\empty + \expandafter\gobbleoneargument + \else + \expandafter\firstofoneargument + \fi} + +%D Bibtex settings separated out + +%D No point in writing the aux file if there is no database... + +\def\setupbibtex{\dosingleempty\dosetupbibtex} + +\def\dosetupbibtex[#1]% + {\let\@@pbdatabase\empty + \getparameters[\??pb][\c!sort=\s!default,#1]% + \expanded{\processaction[\@@pbsort]} + [ \v!no=>\def\bibstyle{cont-no}, + \v!author=>\def\bibstyle{cont-au}, + \v!title=>\def\bibstyle{cont-ti}, + \v!short=>\def\bibstyle{cont-ab}, + \s!default=>\def\bibstyle{cont-no}, + \s!unknown=>\def\bibstyle{\@@pbsort}]% + \ifx\@@pbdatabase\empty\else \writeauxfile \fi} + +\dosetupbibtex[bibtex=bibtex] + +%D \macros{writeauxfile} +%D +%D Unfortunately, \BIBTEX\ is not the best configurable program +%D around. The names of the commands it parses as well as the \type{.aux} +%D extension to the file name are both hardwired. +%D +%D This means \CONTEXT\ has to write a \LATEX-style auxiliary file, yuk! +%D The good news is that it can be rather short. We'll just ask +%D \BIBTEX\ to output the entire database(s) into the \type{bbl} file. +%D +%D The \type{\bibstyle} command controls how the \type{bbl} file will +%D be sorted. The possibilities are: +%D +%D \startitemize[packed] +%D \item by author (+year, title): cont-au.bst +%D \item by title (+author, year): cont-ti.bst +%D \item by short key as in abbrev.bst: cont-ab.bst +%D \item not sorted at all: cont-no.bst +%D \stopitemize + +\def\writeauxfile + {\doifmode{*\v!first} + {\openout \scratchwrite \jobname.aux + \write \scratchwrite {\string\citation{*}}% + \write \scratchwrite {\string\bibstyle{\bibstyle}}% + \write \scratchwrite {\string\bibdata{\@@pbdatabase}}% + \closeout\scratchwrite + \showmessage\m!publications{3}{}% + \expanded{\installprogram{\@@pbbibtex\space\jobname}}}} + +%D \macros{ifsortbycite,iftypesetall,ifautohang,ifbibcitecompress} +%D +%D The module needs some new \type{\if} statements. + +%D Default sort order of the reference list is by citation. + +\newif\ifsortbycite \sortbycitetrue + +%D By default, only referenced publications are typeset + +\newif\iftypesetall \typesetallfalse + +%D Hanging indentation of the publication list +%D will not adjust itself according to the width of the label. + +\newif\ifautohang \autohangfalse + +%D Cite lists are compressed, if possible. 
+ +\newif\ifbibcitecompress \bibcitecompresstrue + +\def\setuppublications + {\dosingleargument\dosetuppublications} + +\def\bibleftnumber#1% + {#1\hfill~} + +\def\dosetuppublications[#1]% + {\getparameters + [\??pb] + [\c!alternative=,#1]% + \doifsomething\@@pbalternative + {\readsysfile + {bibl-\@@pbalternative.tex} + {\showmessage\m!publications{6}{bibl-\@@pbalternative}\let\@@pbalternative\empty} + {\showmessage\m!publications{1}{bibl-\@@pbalternative}\let\@@pbalternative\empty}}% + \getparameters + [\??pb] + [#1]% + \processaction + [\@@pbcriterium] + [ \v!all=>\typesetalltrue, + \s!unknown=>\typesetallfalse]% + \processaction + [\@@pbautohang] + [ \v!yes=>\autohangtrue, + \s!unknown=>\autohangfalse]% + \processaction + [\@@pbsorttype] + [ \v!cite=>\sortbycitetrue, + \v!bbl=>\sortbycitefalse, + \s!default=>\sortbycitetrue, + \s!unknown=>\sortbycitefalse]% + \processaction + [\@@pbnumbering] + [ \v!yes=>\let\@@pbinumbercommand\firstofoneargument, + \v!no=>\let\@@pbinumbercommand\gobbleoneargument, + \v!short=>\def\@@pbinumbercommand##1{\getvalue{pbds-\@@pbk}}, + \v!bib=>\def\@@pbinumbercommand##1{\getvalue{pbdn-\@@pbk}}, + \s!unknown=>\let\@@pbinumbercommand\firstofoneargument]% + \processaction + [\@@pbrefcommand] + [\s!default=>\edef\@@citedefault{\@@pbrefcommand}, + \s!unknown=>\edef\@@citedefault{\@@pbrefcommand}]} + +% initialize + +\def\@@pbrefcommand{num} +\def\@@pbnumbercommand{\bibleftnumber} + +%D \macros{usepublications} +%D +%D We need \type{\usereferences} so that it is possible to +%D refer to page and/or appearance number for publications +%D in the other document. + +\def\usepublications[#1]% + {\usereferences[#1]\processcommalist[#1]\dousepublications} + +\def\dousepublications#1% + {\doonlyonce + {#1.\f!bibextension} + {\readfile{#1.\f!bibextension} + {\showmessage\m!publications{4}{#1.\f!bibextension}} + {\showmessage\m!publications{2}{#1.\f!bibextension}}}} + +%D \macros{setuppublicationlist} +%D +%D This will be the first command in (\BIBTEX-generated) \type{bbl} +%D files. `samplesize' is a sample value (in case of \BIBTEX-generated +%D files, this will be the longest `short' key). `totalnumber' +%D is the total number of entries that will follow in this +%D file. + +%D Both values are only needed for the label calculation +%D if `autohang' is `true', so by default the command is +%D not even needed, and therefore I saw no need to give +%D it it's own system variable and it just re-uses \type{pb}. + +\def\setuppublicationlist + {\dosingleempty\dosetuppublicationlist} + +\def\dosetuppublicationlist[#1]% + {\getparameters[\??pv data][#1]% + \setuplist + [pubs] + [\c!samplesize={AA99},\c!totalnumber={99}, + \c!alternative=a,\c!interaction=,\c!pagenumber=\v!no,#1]} + +\def\setuppublicationlayout[#1]#2% + {\setvalue{\??pv data#1}{#2\unskip}} + +%D \macros{bibalternative} +%D +%D A nice little shorthand that will be used so we don't have to +%D key in the weird \type{\@@pv} parameter names all the time. + +\def\bibalternative#1% + {\getvalue{\??pv\@@currentalternative#1}} + +%D \macros{simplebibdef,bibcommandlist} +%D +%D \type{\simplebibdef} defines \type{bib@#1}, which in turn will +%D use one argument that is stored in \type{@@pb@#1}. +%D +%D \type{\simplebibdef} also defines \type{insert#1}, which can be +%D used in the argument of \type{\setuppublicationlayout} to fetch +%D one of the \type{@@pb@} data entries. 
\type{insert#1} then has +%D three arguments: \type{#1} are commands to be executed before the +%D data, \type{#2} are commands to be executed after the data, and +%D \type{#3} are commands to be executed if the data is not found. + +%D \type{\bibcommandlist} is the list of commands that is affected +%D by this approach. Later on, it will be used to do a series +%D of assignments from \type{#1} to \type{bib@#1}: e.g +%D \type{\title} becomes \type{\bib@title} when used within +%D a publication. + +\newtoks\initializebibdefinitions % we need to prevent clashes (HH) + +% \def\simplebibdef#1% hh: funny expansion ? +% {\@EA\long\@EA\def\csname bib@#1\endcsname##1% +% {\setvalue{\??pb @#1}{##1}% +% \ignorespaces}% +% \@EA\def\csname insert#1\endcsname##1##2##3% +% {\@EA\bibdoifelse +% \@EA{\csname @@pb@#1\endcsname}% +% {##1\csname @@pb@#1\endcsname##2}% +% {##3}% +% }} + +\def\simplebibdef#1% hh: funny expansion ? + {\@EA\long\@EA\def\csname bib@#1\endcsname##1% + {\setvalue{\??pb @#1}{##1}\ignorespaces}% + \expandafter \appendtoks + \expandafter\let\csname insert#1\expandafter\endcsname\csname bibinsert#1\endcsname + \to \initializebibdefinitions + \@EA\unexpanded\@EA\def\csname bibinsert#1\endcsname##1##2##3% + {\@EA\bibdoifelse\@EA{\csname\??pb @#1\endcsname}{##1\csname\??pb @#1\endcsname##2}{##3}}} + +\def\bibcommandlist + {abstract, annotate, arttitle, assignee, bibnumber, bibtype, biburl, chapter, city, + comment, country, day, dayfiled, doi, edition, eprint, howpublished, isbn, issn, + issue, journal, keyword, keywords, lastchecked, month, monthfiled, names, nationality, + note, notes, organization, pages, pubname, pubyear, revision, series, size, thekey, + title, volume, yearfiled} + +\processcommacommand[\bibcommandlist]\simplebibdef + +\def\bibinsertdoi#1#2#3% + {{\bibdoifelse{\@@pb@doi}% + {\edef\ascii{\@EA\detokenize\@EA{\@@pb@doi}}% + #1\expanded{\gotoDOI{\@@pb@thekey}{\ascii}}#2}{#3}}} + +\def\bibinsertbiburl#1#2#3% + {{\bibdoifelse{\@@pb@biburl}% + {\edef\ascii{\@EA\detokenize\@EA{\@@pb@biburl}}% + #1\expanded{\gotoURL{\@@pb@thekey}{\ascii}}#2}{#3}}} + +\def\bibinsertmonth#1#2#3% + {\bibdoifelse{\@@pb@month}% + {#1\doifnumberelse{\@@pb@month}% + {\doifconversiondefinedelse\@@pbmonthconversion + {\convertnumber\@@pbmonthconversion{\@@pb@month}}{\@@pb@month}}% + {\@@pb@month}#2}{#3}} + +\appendtoks + \let\inserturl \bibinsertbiburl % for backward compat. + \let\inserttype\bibinsertbibtype % for backward compat. +\to\initializebibdefinitions + +\def\newbibfield[#1]% + {\simplebibdef{#1}% + \edef\bibcommandlist{\bibcommandlist,#1}} + +%D \macros{bib@crossref} +%D +%D \type{\crossref} is used in database files to point to another +%D entry. Because of this special situation, it has to be defined +%D separately. Since this command will not be seen until at +%D \type{\placepublications}, it may force extra runs. The same is +%D true for \type{\cite} commands inside of publications. + +\def\bib@crossref#1% + {\setvalue{\??pb @crossref}{#1}\ignorespaces} + +\def\bibinsertcrossref#1#2#3% + {\bibdoifelse{\@@pb@crossref} + {#1\@EA\cite\@EA[\@@pb@crossref]#2} + {#3}} + +\appendtoks\let\insertcrossref\bibinsertcrossref\to\initializebibdefinitions + +%D \macros{complexbibdef,specialbibinsert} +%D +%D The commands \type{\artauthor}, \type{\author} and +%D \type{\editor} are more complex than the other commands. 
+%D Their argument lists have this form: +%D +%D \type{\author[junior]{firstnames}[inits]{von}{surname}} +%D +%D (bracketed stuff is optional) +%D +%D And not only that, but there also might be more than one of each of +%D these commands. This is why a special command is needed to insert +%D them, as well as one extra counter for each command. + +%D All of these \type{\@EA}'s and \type{\csnames} make this code +%D look far more complex than it really is. For example, the argument +%D \type{author} defines the macro \type{\bib@author} to do two +%D things: increment the counter \type{\author@num} (let's say to 2) +%D and next store it's arguments in the macro \type{\@@pb@author2}. +%D And it defines \type{\insertauthors} to expand into +%D \starttyping +%D \specialbibinsert{author}{\author@num}{}{}{} +%D \stoptyping + +\def\docomplexbibdef#1% + {\def\currentype{#1}% + \dosingleempty\dodocomplexbibdef} + +\def\dodocomplexbibdef[#1]#2% + {\def\firstarg{#1}\def\secondarg{#2}% + \dosingleempty\dododocomplexbibdef} + +\def\dododocomplexbibdef[#1]#2#3% + {\@EA\increment\csname \currentype @num\endcsname + \setevalue{\??pb @\currentype\csname \currentype @num\endcsname}% + {{\secondarg}{#2}{#3}{#1}{\firstarg}}\ignorespaces}% + +% \def\complexbibdef#1% +% {\@EA\newcounter\csname #1@num\endcsname +% \@EA\def\csname bib@#1\endcsname{\docomplexbibdef{#1}}% +% \@EA\def\csname insert#1s\endcsname##1##2##3% +% {\specialbibinsert{#1}{\csname #1@num\endcsname}{##1}{\unskip ##2}{##3}}} + +\def\complexbibdef#1% + {\@EA\newcounter\csname #1@num\endcsname + \@EA\def\csname bib@#1\endcsname{\docomplexbibdef{#1}}% + \expandafter \appendtoks + \expandafter\let\csname insert#1s\expandafter\endcsname\csname bibinsert#1s\endcsname + \to \initializebibdefinitions + \@EA\def\csname bibinsert#1s\endcsname##1##2##3{\specialbibinsert{#1}{\csname #1@num\endcsname}{##1}{\unskip ##2}{##3}}} + +\processcommalist[author,artauthor,editor]\complexbibdef + +%D Another level of indirection is needed to control the +%D typesetting of all of these arguments. + +%D Btw, there is a conflict between `author' and the predefined interface +%D variable `auteur'. The old version is overruled `auteur' is +%D overruled by the systemconstant definition at the top of this file! + +\newcount\etallimitcounter +\newcount\etaldisplaycounter +\newcount\todocounter + +\def\specialbibinsert#1#2#3#4#5% + {\bgroup + \ifnum#2>\zerocount + \etallimitcounter =0\bibalternative{#1etallimit}\relax + \etaldisplaycounter=0\bibalternative{#1etaldisplay}\relax + \ifnum #2>\etallimitcounter + \todocounter\etaldisplaycounter + % just in case ... + \ifnum\todocounter>\etallimitcounter + \todocounter\etallimitcounter + \fi + \else + \todocounter#2\relax + \fi + \ifnum\todocounter>\zerocount + % find the current author list + \let\templist\empty + \dorecurse{#2} + {\toks0=\@EA\@EA\@EA{\csname @@pb@#1\recurselevel\endcsname}% + \ifx\templist\empty \edef\templist{\the\toks0}% + \else \edef\templist{\templist,\the\toks0}\fi }% + \doifdefinedelse + {\??pv data#1\c!command} + {\doifemptyelsevalue + {\??pv data#1\c!command} + {#3\dospecialbibinsert{#1}{\todocounter}{\templist}#4}% + {#3\getvalue{\??pv data#1\c!command}{#1}{\todocounter}{\templist}#4}}% + {#3\dospecialbibinsert{#1}{\todocounter}{\templist}#4}% + \else + #5% + \fi + \else + #5% + \fi + \egroup} + +%D This macro does the hard work of inserting a list of people in the +%D output, with proper regard of all the inbetween strings that can +%D arise depending on length of the list of people. 
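A rough Lua counterpart of this separator logic, using the same namesep, lastnamesep, finalnamesep and et-al values that the MkIV side passes to bibtex.authors.setsettings earlier in this patch; the helper name joinnames and the sample names are made up for illustration.

-- illustrative sketch, not module code: join a name list with namesep between
-- names, finalnamesep (or lastnamesep for just two names) before the last one,
-- and etaltext once the list exceeds etallimit
local function joinnames(names,s)
    local n         = #names
    local etallimit = s.etallimit or n
    local todo      = n > etallimit and math.min(s.etaldisplay or etallimit,etallimit) or n
    local result    = { }
    for i=1,todo do
        result[#result+1] = names[i]
        if i == todo - 1 then
            result[#result+1] = n > 2 and s.finalnamesep or s.lastnamesep
        elseif i < todo - 1 then
            result[#result+1] = s.namesep
        end
    end
    if todo < n then
        result[#result+1] = s.etaltext    -- list was truncated at the limit
    end
    return table.concat(result)
end

local s = { namesep = ", ", lastnamesep = " and ", finalnamesep = ", and ",
            etallimit = 3, etaldisplay = 1, etaltext = " et al." }
print(joinnames({ "An Author", "Another One" }, s))    -- An Author and Another One
print(joinnames({ "One", "Two", "Three" }, s))         -- One, Two, and Three
print(joinnames({ "One", "Two", "Three", "Four" }, s)) -- One et al.

The real macro below additionally honours a per-field command hook and fetches the names from the stored entry; the sketch only shows how the separators are chosen.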
+ +%D \#1 = type +%D \#2 = number of items to be typeset +%D \#3 = commacommand containing authors + +\def\dospecialbibinsert#1#2#3% + {\getcommacommandsize[#3]% + \scratchcounter 0 + \def\processauthoritem##1% + {\advance\scratchcounter1 + \ifnum \numexpr\scratchcounter-1\relax<#2\relax + \getvalue{\??pv data#1}##1% + \ifnum \scratchcounter=#2\relax + \ifnum\etallimitcounter<\commalistsize\relax \bibalternative{#1etaltext}\fi + \else \ifnum\numexpr\scratchcounter+1 = #2\relax + \ifnum \commalistsize > \plustwo \bibalternative\c!finalnamesep + \else \bibalternative\c!lastnamesep \fi + \else + \bibalternative\c!namesep + \fi \fi + \fi}% + \processcommacommand[#3]\processauthoritem } + +%D \macros{invertedauthor,normalauthor,invertedshortauthor,normalshortauthor} +%D +%D Just some commands that can be used in \type{\setuppublicationparameters} +%D If you want to write an extension to the styles, you might +%D as well define some of these commands yourself. +%D +%D The argument liust has been reordered here, and the meanings +%D are: +%D +%D {\obeylines\parskip0pt +%D \type{#1} firstnames +%D \type{#2} von +%D \type{#3} surname +%D \type{#4} inits +%D \type{#5} junior +%D } +%D + +\def\normalauthor#1#2#3#4#5% + {\bibdoif{#1}{#1\bibalternative\c!firstnamesep}% + \bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!surnamesep#5\unskip}} + +\def\normalshortauthor#1#2#3#4#5% + {\bibdoif{#4}{#4\bibalternative\c!firstnamesep}% + \bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!surnamesep#5\unskip}} + +\def\invertedauthor#1#2#3#4#5% + {\bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!juniorsep #5}% + \bibdoif{#1}{\bibalternative\c!surnamesep #1\unskip}} + +\def\invertedshortauthor#1#2#3#4#5% + {\bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!juniorsep #5}% + \bibdoif{#4}{\bibalternative\c!surnamesep #4\unskip}} + +%D \macros{clearbibitem,clearbibitemtwo,bibitemdefs} +%D +%D These are used in \type{\typesetapublication} to do +%D initializations and cleanups. + +\def\clearbibitem#1{\setvalue{\??pb @#1}{}}% + +\def\clearbibitemtwo#1% + {\letvalue{#1@num}\!!zerocount + \scratchcounter\plusone + \doloop + {\doifdefinedelse{\??pb @#1\the\scratchcounter} + {\letvalue{\??pb @#1\the\scratchcounter}\empty + \advance\scratchcounter\plusone}% + {\exitloop}}} + +\def\bibitemdefs#1% + {\@EA\let\@EA\tempa \csname bib@#1\endcsname + \@EA\let\csname #1\endcsname \tempa } + +%D \macros{startpublication} +%D +%D We are coming to the end of this module, to the macros that +%D do typesetting and read the \type{bbl} file. + +\newcounter\bibcounter + +%D Just a \type{\dosingleempty} is the most friendly +%D of doing this: there need not even be an argument +%D to \type{\startpublication}. Of course, then there +%D is no key either, and it had better be an +%D article (otherwise the layout will be all screwed up). +%D +%D Now prohibits comments, so % can be used for urls + +\def\startpublication + {\edef\bibmodsavedpercent{\the\catcode`\%}% + \catcode`\%=12 + \dosingleempty\dostartpublication} + +\def\stoppublication + {} % the \catcode of % is reset below + +%D Only specifying the key in the argument is also +%D legal. In storing this stuff into macros, some trickery with +%D token registers is needed to fix the expansion problems. Even so, +%D this appears to not always be 100\% safe, so people are +%D urgently advised to use \ETEX\ instead of traditional \TEX. 
+%D +%D In \ETEX, all expansion problems are conviniently solved by +%D the primitive \type{\protected}. To put that another way: +%D +%D It's not a bug in this module if it does not appear in \ETEX! + +\long\def\dostartpublication[#1]#2\stoppublication% + {\increment\bibcounter + \bgroup + \doifassignmentelse{#1}% + {\getparameters[\??pb][k=,t=article,n=,s=,a=,y=,o=,u=,#1]}% + {\getparameters[\??pb][k=#1,t=article,n=,s=,a=,y=,o=,u=]}% + \@EA\toks\@EA2\@EA{\@@pba}% + \@EA\toks\@EA4\@EA{\@@pbs}% + \toks0={\ignorespaces #2}% + \setxvalue{pbdk-\@@pbk}{\@@pbk} + \setxvalue{pbda-\@@pbk}{\the\toks2} + \setxvalue{pbdy-\@@pbk}{\@@pby} + \setxvalue{pbds-\@@pbk}{\the\toks4} + \setxvalue{pbdn-\@@pbk}{\@@pbn} + \setxvalue{pbdt-\@@pbk}{\@@pbt} + \setxvalue{pbdo-\@@pbk}{\@@pbo} + \setxvalue{pbdu-\@@pbk}{\@@pbu} + \setxvalue{pbdd-\@@pbk}{\the\toks0} + \xdef\allrefs{\allrefs,\@@pbk}% + \egroup + \catcode`\%=\bibmodsavedpercent\relax } + +% intialization of the order-list: + +\let\allrefs\empty + +%D The next macro is needed because the number command of the +%D publist sometimes needs to fetch something from the current +%D item (like the 'short' key). For this, the ID of the current +%D item is passed in the implict parameter \type{\@@pbk} + +\def\makepbkvalue#1{\def\@@pbk{#1}} + +\newif\ifinpublist + +% from Hans + +\def\ignoresectionconversion + {\let\@@sectionconversion\secondoftwoarguments} + +\let\normaldosetfilterlevel\dosetfilterlevel + +\def\patcheddosetfilterlevel#1#2% beware: this one is \let + {\bgroup + \ignoresectionconversion + \edef\askedlevel{#1}% + \edef\askedfilter{#2}% +% \message{ASKD: \meaning\askedlevel}% +% \message{PREV: \meaning\v!previous}% + \ifx\askedlevel\v!current + \dosetcurrentlevel\askedlevel + \else\ifx\askedlevel\v!previous + \dosetpreviouslevel\askedlevel + \else\ifx\askedlevel\v!all + \global\chardef\alltoclevels\plusone + \else\ifx\askedlevel\v!text + \global\chardef\alltoclevels\plusone + \else + \edef\byaskedlevel{\csname\??by\askedlevel\endcsname}% + \ifx\byaskedlevel\v!text + \dosettextlevel\askedlevel + \else + \dosetotherlevel\askedlevel + \fi + \fi\fi\fi\fi + % experiment + \ifx\askedfilter\empty \else + \xdef\currentlevel{\currentlevel\sectionseparator\askedfilter}% + \fi + \egroup} + +\def\filllocalpublist% + {\doifdefinedelse{\alltoclevels} + {\let\dosetfilterlevel\patcheddosetfilterlevel + \dosettoclevel\??li{pubs}% + \let\dosetfilterlevel\normaldosetfilterlevel }% + {\dosettoclevel\??li{pubs}}% + \global\let\glocalpublist\empty + \doloop + {\doifdefinedelse + {\r!cross cite-\jobname-\recurselevel} + {\doifreferencefoundelse + {cite-\jobname-\recurselevel} + {\@EA\doifreglevelelse\@EA[\currentlocationreference] + {\@EA\doglobal\@EA\addtocommalist\@EA + {\currenttextreference}\glocalpublist}{}} + {}}% + {\exitloop}}% + \let\localpublist\glocalpublist} + +\def\typesetpubslist + {\dobeginoflist +% \the\initializebibdefinitions + \edef\askedlevel{\csname \??li pubs\c!criterium\endcsname}% + \ifx\askedlevel\v!all + \def\bibrefprefix{}% + \else % + \preparebibrefprefix + \fi + \ifsortbycite + \filllocalpublist + \iftypesetall + \let\localallrefs\allrefs + \processcommacommand[\localpublist]\typesetapublication + \def\removefromallrefs##1% + {\removefromcommalist{##1}\localallrefs }% + \processcommacommand[\localpublist]\removefromallrefs + \processcommacommand[\localallrefs]\typesetapublication + \else + \processcommacommand[\localpublist]\typesetapublication + \fi + \else + \iftypesetall + \processcommacommand[\allrefs]\typesetapublication + \else + % + 
\filllocalpublist + \processcommacommand[\allrefs]\maybetypesetapublication + \fi + \fi + \doendoflist} + +\newif\ifinpublist + +\def\maybetypesetapublication#1% + {\global\inpublistfalse + \def\test{#1}% + \def\runtest##1% + {\def\tempa{##1}\ifx \test\tempa \global\inpublisttrue \fi}% + \processcommacommand[\localpublist]\runtest + \ifinpublist \typesetapublication{#1}\fi} + +\def\initializepubslist + {\edef\@@pbnumbering{\@@pbnumbering}% + \ifautohang + \ifx\@@pbnumbering\v!short + \setbox\scratchbox\hbox{\@@pbnumbercommand{\getvalue{\??li pubs\c!samplesize}}}% + \else\iftypesetall + \setbox\scratchbox\hbox{\@@pbnumbercommand{\getvalue{\??li pubs\c!totalnumber}}}% + \else + \setbox\scratchbox\hbox{\@@pbnumbercommand{\numreferred}}% + \fi\fi + \edef\samplewidth{\the\wd\scratchbox}% + \setuplist[pubs][\c!width=\samplewidth,\c!distance=0pt]% + \ifx\@@pbnumbering\v!short + \def\@@pblimitednumber##1{\hbox to \samplewidth + {\@@pbnumbercommand{\getvalue{pbds-\@@pbk}}}}% + \else \ifx \@@pbnumbering\v!bib + \def\@@pblimitednumber##1{\hbox to \samplewidth + {\@@pbnumbercommand{\getvalue{pbdn-\@@pbk}}}}% + \else + \def\@@pblimitednumber##1{\hbox to \samplewidth{\@@pbnumbercommand{##1}}}% + \fi \fi + \else + \ifx\@@pbnumbering\v!short + \doifemptyelse + {\getvalue{\??li pubs\c!width}} + {\def\@@pblimitednumber##1{\hbox + {\@@pbnumbercommand{\getvalue{pbds-\@@pbk}}}}}% + {\def\@@pblimitednumber##1{\hbox to \getvalue{\??li pubs\c!width}% + {\@@pbnumbercommand{\getvalue{pbds-\@@pbk}}}}}% + \else \ifx \@@pbnumbering\v!bib + \doifemptyelse + {\getvalue{\??li pubs\c!width}} + {\def\@@pblimitednumber##1{\hbox + {\@@pbnumbercommand{\getvalue{pbdn-\@@pbk}}}}}% + {\def\@@pblimitednumber##1{\hbox to \getvalue{\??li pubs\c!width}% + {\@@pbnumbercommand{\getvalue{pbdn-\@@pbk}}}}}% + \else + \doifemptyelse + {\getvalue{\??li pubs\c!width}} + {\def\@@pblimitednumber##1{\hbox{\@@pbnumbercommand{##1}}}}% + {\def\@@pblimitednumber##1{\hbox to \getvalue{\??li pubs\c!width}{\@@pbnumbercommand{##1}}}}% + \fi + \fi + \fi + \ifx\@@pbnumbering\v!no + \setuplist[pubs][\c!numbercommand=,\c!symbol=\v!none,\c!textcommand=\outdented]% + \else + \setuplist[pubs][\c!numbercommand=\@@pblimitednumber]% + \fi + \doifelsevalue + {\??pv data\c!maybeyear}{\v!off}{\def\maybeyear##1{}}{\def\maybeyear##1{##1}}% + \forgetall} % bugfix 2005/03/18 + +\def\outdented#1% move to supp-box ? 
+ {\hskip -\hangindent #1} + +%D The full list of publications + +\def\completepublications + {\dosingleempty\docompletepublications} + +\def\bibdogetupsometextprefix#1#2#3% + {\ifcsname#2#1#3\endcsname + \csname#2#1#3\endcsname + \else\ifcsname\??la#1\c!default\endcsname + \@EA\ifx\csname\??la#1\c!default\endcsname\empty + \ifcsname#2#3\endcsname + \csname#2#3\endcsname + \else\ifcsname#2\s!en#3\endcsname + \csname#2\s!en#3\endcsname + \fi\fi + \else + \expandafter\bibdogetupsometextprefix + \csname\??la#1\c!default\endcsname{#2}{#3}% + \fi + \else + \ifcsname#2#3\endcsname + \csname#2#3\endcsname + \else\ifcsname#2\s!en#3\endcsname + \csname#2\s!en#3\endcsname + \fi\fi + \fi\fi} + +\def\docompletepublications[#1]% + {\begingroup + \setuplist[pubs][\c!criterium=\v!previous,#1] + \begingroup + \let\handletextprefix\firstoftwoarguments + \edef\headtextpubs{\bibdogetupsometextprefix\headlanguage\c!title{pubs}}% + \doifdefinedelse + {\??pv data\v!title} + {\doifemptyelsevalue + {\??pv data\v!title} + {\expanded{\systemsuppliedtitle[pubs]{\headtextpubs}}}% + {\expanded{\getvalue{\??pv data\v!title}{\headtextpubs}}}% + }% + {\expanded{\systemsuppliedtitle[pubs]{\headtextpubs}}}% + \endgroup + \dodoplacepublications } + +%D And the portion with the entries only. + +\def\placepublications + {\dosingleempty\doplacepublications} + +\def\doplacepublications[#1]% + {%\getparameters[\??pv data][#1] + \begingroup + \setuplist[pubs][\c!criterium=\v!previous,#1]% + \dodoplacepublications }% + +\def\dodoplacepublications% + {\initializepubslist + \doifelsevalue + {\??li pubs\c!option}{\v!continue}% + {}% + {\global\let\bibcounter\!!zerocount }% + \inpublisttrue + \typesetpubslist + \inpublistfalse + \endgroup} + +%D \subsubject{What's in a publication} + +\unexpanded\def\typesetapublication#1% + {\doifsomething{#1} + {\doglobal\increment\bibcounter + \bgroup + \the\initializebibdefinitions + \makepbkvalue{#1}% + \ifgridsnapping + \snaptogrid\vbox{\dodolistelement{pubs}{}{\bibcounter}% + {\expanded{\reference[\bibrefprefix#1]{\bibcounter}}% + \strut \dotypesetapublication{#1}\strut }{}{}}% + \else + \dodolistelement{pubs}{}{\bibcounter}% + {\expanded{\reference[\bibrefprefix#1]{\bibcounter}}% + \strut \dotypesetapublication{#1}\strut }{}{}% + \fi + \egroup}} + +\def\dotypesetapublication#1% + {\bgroup + \def\@@currentalternative{data}% + \processcommacommand[\bibcommandlist,crossref]\clearbibitem + \processcommalist [artauthor,author,editor]\clearbibitemtwo + \processcommacommand[\bibcommandlist]\bibitemdefs + \processcommalist [artauthor,author,editor,crossref]\bibitemdefs + \let\biblanguage\empty + \getvalue{pbdd-#1}% + \ifcsname pbdt-#1\endcsname \bibalternative{\getvalue{pbdt-#1}}\fi + \egroup } + +%D An afterthought: + +\def\maybeyear#1{} + +%D An another: + +\def\noopsort#1{} + +%D This is the result of bibtex's `language' field. + +\def\setbiblanguage#1#2{\setvalue{\??pb @lang@#1}{#2}} + +\def\lang#1% + {\def\biblanguage{#1}% + \ifcsname \??pb @lang@#1\endcsname + \expanded{\language[\getvalue{\??pb @lang@#1}]}% + \fi \ignorespaces} + +%D \subject{Citations} +%D +%D \macros{cite,bibref} +%D +%D The indirection with \type{\dobibref} allows \LATEX\ style +%D \type{\cite} commands with a braced argument (these might appear +%D in included data from the \type{.bib} file). 
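The fallback performed by \bibdogetupsometextprefix above (the language-specific label first, then the language's configured default, then a bare or English label) is easier to follow as a small lookup chain. A standalone Lua sketch with made-up table names and sample label values:

-- illustrative sketch, not module code: resolve a head text the way
-- \bibdogetupsometextprefix falls back from a language to its default
-- language and finally to a bare or English label
local function findlabel(labels,defaults,language,name)
    local seen = { }
    while language and not seen[language] do
        seen[language] = true                 -- guard against cyclic defaults
        local group = labels[language]
        if group and group[name] then
            return group[name]
        end
        language = defaults[language]         -- e.g. a dialect falling back to its parent
    end
    return (labels[""] and labels[""][name]) or (labels.en and labels.en[name])
end

-- sample data only; the real head texts come from the interface files
local labels   = { en = { pubs = "References" }, de = { pubs = "Literatur" } }
local defaults = { nl = "en" }
print(findlabel(labels,defaults,"de","pubs"))  -- Literatur
print(findlabel(labels,defaults,"nl","pubs"))  -- References (via the default chain)
print(findlabel(labels,defaults,"fr","pubs"))  -- References (English fallback)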
+ +% \unexpanded\def\cite +% {\doifnextcharelse{[} +% {\dodocite} +% {\dobibref}} +% \def\dobibref#1% +% {\docite[#1][]} +% \def\dodocite[#1]% +% {\startstrictinspectnextcharacter +% \dodoubleempty\dododocite[#1]} +% \def\dododocite[#1][#2]{% +% \stopstrictinspectnextcharacter +% \docite[#1][#2]} + +\unexpanded\def\cite + {\strictdoifnextoptionalelse\dodocite\dobibref} + +\def\dobibref#1% + {\docite[#1][]} + +\def\dodocite[#1]% + {\strictdoifnextoptionalelse{\docite[#1]}{\docite[#1][]}} + +\def\docite[#1][#2]% + {\begingroup + \setupinteraction[\c!style=]% + \edef\temp{#2}% + \ifx\empty\temp \secondargumentfalse + \else \secondargumenttrue \fi + \ifsecondargument + \processcommalist[#2]\docitation + \doifassignmentelse + {#1}% + {\getparameters[LO][\c!alternative=,\c!extras=,#1]% + \edef\@@currentalternative{\LOalternative}% + \ifx\@@currentalternative\empty + \edef\@@currentalternative{\@@citedefault}% + \fi + \ifx\LOextras\empty + \setupcite[\@@currentalternative][#1]% + \else + \expandafter\ifx\csname LOright\endcsname \relax + \edef\LOextras{{\LOextras\bibalternative\c!right}}% + \else + \edef\LOextras{{\LOextras\LOright}}% + \fi + \expanded{\setupcite[\@@currentalternative][#1,\c!right=\LOextras]}% + \fi + }% + {\def\@@currentalternative{#1}}% + \expanded{% + \processaction[\csname @@pv\@@currentalternative \c!compress\endcsname]} + [ \v!yes=>\bibcitecompresstrue, + \v!no=>\bibcitecompressfalse, + \s!default=>\bibcitecompresstrue, + \s!unknown=>\bibcitecompresstrue]% + \getvalue{bib\@@currentalternative ref}[#2]% + \else + \processcommalist[#1]\docitation + \expanded{\processaction[\csname @@pv\@@citedefault \c!compress\endcsname]} + [ \v!yes=>\bibcitecompresstrue, + \v!no=>\bibcitecompressfalse, + \s!default=>\bibcitecompresstrue, + \s!unknown=>\bibcitecompresstrue]% + \edef\@@currentalternative{\@@citedefault}% + \getvalue{bib\@@citedefault ref}[#1]% + \fi + \endgroup} + +%D \macros{nocite} + +\def\nocite[#1]% + {\processcommalist[#1]\addthisref + \processcommalist[#1]\docitation } + +%D \macros{setupcite} + +\def\setupcite{\dodoubleempty\dosetupcite} + +\def\dosetupcite[#1][#2]% + {\ifsecondargument + \def\dodosetupcite##1{\getparameters[\??pv##1][#2]}% + \processcommalist[#1]\dodosetupcite + \else % default case + \getparameters[\??pv\@@citedefault][#1]% + \fi } + +%D Low-level stuff + +\def\getcitedata#1[#2]#3[#4]#5to#6% + {\bgroup + \addthisref{#4}% + \dofetchapublication{#4}% + \doifdefinedelse{@@pb@bib#2}% + {\xdef#6{\getvalue{@@pb@bib#2}}}% + {\xdef#6{\getvalue{@@pb@#2}}}% + \egroup} + +\def\dofetchapublication#1% + {\makepbkvalue{#1}% + \processcommacommand[\bibcommandlist,crossref]\clearbibitem + \processcommalist [artauthor,author,editor]\clearbibitemtwo + \processcommacommand[\bibcommandlist]\bibitemdefs + \processcommalist [artauthor,author,editor,crossref]\bibitemdefs + \getvalue{pbdd-#1}} + +%D This new version writes a reference out to the tui file for every +%D \type{\cite}. This will allow backlinking. +%D +%D Some special care is needed so that references are not added from +%D weird locations like in the TOC or within a \type{\setbox} command. + +\newcounter\citationnumber + +\def\docitation#1{% + \iftrialtypesetting \else + \ifdoinpututilities\else + \doglobal\increment\citationnumber + \expanded{\rawreference{}{cite-\jobname-\citationnumber}{#1}}% + \fi \fi } + +%D \macros{numreferred,doifreferredelse,addthisref,publist} +%D +%D The interesting command here is \type{\addthisref}, which maintains +%D the global list of references. 
+%D +%D \type{\numreferred} is needed to do automatic calculations on +%D the label width, and \type{\doifreferredelse} will be used +%D to implement \type{criterium=cite}. + +\newcounter\numreferred + +\long\def\doifreferredelse#1{\doifdefinedelse{pbr-#1}} + +\def\addthisref#1% + {\doifundefinedelse{pbr-#1} + {\setxvalue{pbr-#1}{\citationnumber}% + \doglobal\increment\numreferred + \ifx\publist\empty \gdef\publist{#1}\else\appended\gdef\publist{,#1}\fi} + {\setxvalue{pbr-#1}{\getvalue{pbr-#1},\citationnumber}}} + +\let\publist\empty + +%D \macros{doifbibreferencefoundelse} +%D +%D Some macros to fetch the information provided by +%D \type{\startpublication}. + +\def\doifbibreferencefoundelse#1% + {\preloadbiblist + \doifdefinedelse{pbdk-#1} + {\firstoftwoarguments} + {\showmessage\m!publications{5}{#1 is unknown}% + \secondoftwoarguments}} + +%D \macros{ixbibauthoryear,thebibauthors,thebibyears} +%D +%D If compression of \type{\cite}'s argument expansion is on, +%D the macros that deal with authors and years call this internal +%D command to do the actual typesetting. +%D +%D Two entries with same author but with different years may +%D be condensed into ``Author (year1,year2)''. This is about the +%D only optimization that makes sense for the (author,year) +%D style of citations (years within one author have to be unique +%D anyway so no need to test for that, and ``Author1, Author2 (year)'' +%D creates more confusion than it does good). +%D +%D In the code below, +%D the macro \type{\thebibauthors} holds the names of the alternative +%D author info fields for the current list. This is a commalist, +%D and \type{\thebibyears} holds the (collection of) year(s) that go with +%D this author (possibly as a nested commalist). +%D +%D There had better be an author for all cases, but there +%D does not have to be year info always. \type{\thebibyears} is +%D pre-initialized because this makes the insertion macros simpler. +%D +%D In `normal' \TeX, of course there are expansion problems again. + +\def\ixbibauthoryear#1#2#3#4% + {\bgroup + \gdef\ixlastcommand {#4}% + \gdef\ixsecondcommand{#3}% + \gdef\ixfirstcommand {#2}% + \glet\thebibauthors \empty + \glet\thebibyears \empty + \getcommalistsize[#1]% + \ifbibcitecompress + \dorecurse\commalistsize{\xdef\thebibyears{\thebibyears,}}% + \processcommalist[#1]\docompressbibauthoryear + \else + \processcommalist[#1]\donormalbibauthoryear + \fi + \egroup + \dobibauthoryear} + +%D \macros{dodobibauthoryear} +%D +%D This macro only has to make sure that the lists +%D \type{\thebibauthors} and \type{\thebibyears} are printed. + +\def\dobibauthoryear + {\scratchcounter\zerocount + \getcommacommandsize[\thebibauthors]% + \edef\authorcount{\commalistsize}% + \@EA\processcommalist\@EA[\thebibauthors]\dodobibauthoryear} + +\def\dodobibauthoryear#1% + {\advance\scratchcounter\plusone + \edef\wantednumber{\the\scratchcounter}% + \getfromcommacommand[\thebibyears][\wantednumber]% + \@EA\def\@EA\currentbibyear\@EA{\commalistelement}% + \setcurrentbibauthor{#1}% + \ifnum\scratchcounter=\plusone + \ixfirstcommand + \else\ifnum \scratchcounter=\authorcount\relax + \ixlastcommand + \else + \ixsecondcommand + \fi\fi} + +\def\setcurrentbibauthor#1% + {\getcommacommandsize[#1]% + \ifcase\commalistsize + % anonymous? 
+ \def\currentbibauthor{}% + \or + \def\currentbibauthor{#1}% + \or + \expanded{\docurrentbibauthor#1}% + \else + \handlemultiplebibauthors{\commalistsize}{#1}% + \fi } + +\newcount\citescratchcounter + +\def\handlemultiplebibauthors#1#2% + {\citescratchcounter 0 + \def\currentbibauthor{}% + \def\bibprocessauthoritem##1% + {\advance\citescratchcounter1 + \ifnum \citescratchcounter=#1\relax + \edef\currentbibauthor{\currentbibauthor##1}% + \else \ifnum\numexpr\citescratchcounter+1 = #1\relax + \edef\currentbibauthor{\currentbibauthor ##1\bibalternative{andtext}}% + \else + \edef\currentbibauthor{\currentbibauthor ##1\bibalternative{namesep}}% + \fi + \fi }% + \processcommalist[#2]\bibprocessauthoritem } + +\setupcite + [author,authoryear,authoryears] + [\c!namesep={, }] + +%D This discovery of authoretallimit is not the best one, +%D but it will do for now. + +\def\docurrentbibauthor#1,#2% + {\doifemptyelse{#2} + {\def\currentbibauthor{#1\bibalternative{otherstext}}} + {\@EA + \ifx\csname \??pv\@@currentalternative authoretallimit\endcsname\relax + \edef\currentbibauthor{#1\bibalternative{andtext}#2}% + \else + \edef\currentbibauthor{#1% + \ifcase0\bibalternative{authoretallimit}\relax\or + \bibalternative{otherstext}\else\bibalternative{andtext}#2\fi}% + \fi}} + +%D This is not the one Hans made for me, because I need a global +%D edef, and the \type{\robustdoifinsetelse} doesn't listen to +%D \type{\doglobal } + +\def\robustaddtocommalist#1#2% {item} \cs + {\robustdoifinsetelse{#1}#2\resetglobal + {\dodoglobal\xdef#2{\ifx#2\empty\else#2,\fi#1}}} + +%D \macros{donormalbibauthoryear} +%D +%D Now we get to the macros that fill the two lists. +%D The `simple' one really is quite simple. + +\def\donormalbibauthoryear#1% + {\addthisref{#1}% + \def\myauthor{Xxxxxxxxxx}% + \def\myyear{0000}% + \doifbibreferencefoundelse{#1} + {\def\myauthor{{\getvalue{pbda-#1}}}% + \def\myyear {\getvalue{pbdy-#1}}}% + {}% + \@EA\doglobal\@EA\appendtocommalist\@EA{\myauthor}\thebibauthors + \@EA\doglobal\@EA\appendtocommalist\@EA{\myyear }\thebibyears} + +%D \macros{docompressbibauthoryear} +%D +%D So much for the easy parts. Nothing at all will be done if +%D the reference is not found or the reference does not contain +%D author data. No questions marks o.s.s. (to be fixed later) + +\def\docompressbibauthoryear#1% + {\addthisref{#1}% + \def\myauthor{Xxxxxxxxxx}% + \def\myyear {0000}% + \doifbibreferencefoundelse{#1} + {\xdef\myauthor{\csname pbda-#1\endcsname }% + \xdef\myyear {\csname pbdy-#1\endcsname }} + {}% + \ifx\myauthor\empty\else + \checkifmyauthoralreadyexists + \findmatchingyear + \fi} + +%D two temporary counters. One of these two can possibly be replaced +%D by \type{\scratchcounter}. + +\newcount\bibitemcounter +\newcount\bibitemwanted + +%D The first portion is simple enough: if this is the very first author +%D it is quite straightforward to add it. \type{\bibitemcounter} and +%D \type{\bibitemwanted} are needed later to insert the year +%D information in the correct item of \type{\thebibyears} + +\def\checkifmyauthoralreadyexists + {\doifemptyelsevalue{thebibauthors} + {\global\bibitemwanted \plusone + \global\bibitemcounter \plusone + \xdef\thebibauthors{{\myauthor}}} + {% the next weirdness is because according to \getcommalistsize, + % the length of \type{[{{},{}}]} is 2. 
+ \@EA\getcommalistsize\@EA[\thebibauthors,]% + \global\bibitemcounter\commalistsize + \global\advance\bibitemcounter\minusone + \global\bibitemwanted \zerocount + \processcommacommand[\thebibauthors]\docomparemyauthor}} + +%D The outer \type{\ifnum} accomplishes the addition of +%D a new author to \type{\thebibauthors}. The messing about with +%D the two counters is again to make sure that \type{\thebibyears} +%D will be updated correctly.If the author {\it was} found, +%D the counters will stay at their present values and everything +%D will be setup properly to insert the year info. + +\def\docomparemyauthor#1% + {\global\advance\bibitemwanted \plusone + \def\mytempc{#1}% +% \message{authors: \myauthor <=>\mytempc \ifx\mytempc\myauthor :Y \else :N +% \meaning \myauthor, \meaning\mytempc\fi (\the\bibitemwanted = \the\bibitemcounter)}% + \ifx\mytempc\myauthor + \quitcommalist + \else + \ifnum\bibitemwanted = \bibitemcounter\relax + \global\advance\bibitemwanted \plusone + \global\bibitemcounter\bibitemwanted\relax + \@EA\doglobal\@EA\robustaddtocommalist\@EA{{\myauthor}}\thebibauthors + \fi + \fi} + +%D This macro should be clear now. + +\def\findmatchingyear + {\edef\wantednumber{\the\bibitemwanted}% + \getfromcommacommand[\thebibyears][\wantednumber]% + \ifx\commalistelement\empty + \edef\myyear{{\myyear}}% + \else + \edef\myyear{{\commalistelement, \myyear}}% + \fi + \edef\newcommalistelement{\myyear}% + \doglobal\replaceincommalist \thebibyears \wantednumber} + +%D \macros{preparebibrefprefix} +%D +%D The reference list only writes bare references when the criterium +%D is `all'. Otherwise, a prefix is added to make sure that pdfTeX +%D does not encounter duplicate named references. On the generation +%D side, this is not a big problem. \type{\preparebibrefprefix} +%D creates a suitable string to prepend if a prefix is needed. +%D +%D Because this macro is used within \type{\cite } that itself +%D can be used within lists like the table of contents, it needs +%D to save and restore \type{\savedalltoclevels} and +%D \type{\currentlevel} (\type{\dosetfilterlevel} needs to change +%D their values globally). + +\def\preparebibrefprefix + {\chardef\savedalltoclevels \alltoclevels + \let\savedcurrentlevel\currentlevel + \let\dosetfilterlevel\patcheddosetfilterlevel + \dosettoclevel\??li{pubs}% + \edef\bibrefprefix{\@@sectiontype\currentlevel\sectionseparator}% + \let\dosetfilterlevel\normaldosetfilterlevel + \global\let\currentlevel\savedcurrentlevel + \global\chardef\alltoclevels \savedalltoclevels } + +%D \macros{preparebibreflist} +%D +%D But this optional prefixing is a bit of a problem on the +%D other side. We would like to do \type{\goto{}[article-full]} +%D but can't do it like that, because the actual label may be +%D \type{1:2:0:3:4:article-full]} or so. 
The problem is solved +%D by building a commalist that looks like this: +%D \starttyping +%D \def\bibreflist% +%D {1:2:0:3:4:article-full, +%D 1:2:0:3:article-full, +%D 1:2:0:article-full, +%D 1:2:article-full, +%D 1:article-full, +%D article-full} +%D \stoptyping + +\def\preparebibreflist#1% + {\let\bibreflist\empty + \def\storeitem##1% + {\ifx\bibreflist\empty + \edef\prefix{##1\sectionseparator}% + \edef\bibreflist{\prefix#1,#1}% + \else + \edef\prefix{\prefix##1\sectionseparator}% + \edef\bibreflist{\prefix#1,\bibreflist}% + \fi}% + \expanded{\processseparatedlist[\bibrefprefix][\sectionseparator]}\storeitem } + +%D \macros{gotobiblink,inbiblink,atbiblink} +%D +%D The final task is looping over that list until a match is found. + +\newif\ifbibreffound + +\def\gotobiblink#1[#2]% + {\bgroup + \preparebibrefprefix + \preparebibreflist{#2}% + \global\bibreffoundfalse + \def\setuplink##1% + {\ifbibreffound\else + \doifreferencefoundelse + {##1} + {\global\bibreffoundtrue \goto{#1}[##1]}% + {}\fi}% + \processcommacommand[\bibreflist]\setuplink + \ifbibreffound \else \unknownreference{#2}\fi + \egroup } + +\def\atbiblink[#1]% + {\bgroup + \preparebibrefprefix + \preparebibreflist{#1}% + \global\bibreffoundfalse + \def\setuplink##1% + {\ifbibreffound\else + \doifreferencefoundelse + {##1} + {\global\bibreffoundtrue \at[##1]}% + {}\fi}% + \processcommacommand[\bibreflist]\setuplink + \ifbibreffound \else \unknownreference{#1}\fi + \egroup } + +\def\inbiblink[#1]% + {\bgroup + \preparebibrefprefix + \preparebibreflist{#1}% + \global\bibreffoundfalse + \def\setuplink##1% + {\ifbibreffound\else + \doifreferencefoundelse + {##1} + {\global\bibreffoundtrue \in[##1]}% + {}\fi}% + \processcommacommand[\bibreflist]\setuplink + \ifbibreffound \else \unknownreference{#1}\fi + \egroup } + +%D \macros{bibauthoryearref,bibauthoryearsref,bibauthorref,bibyearref} +%D +%D Now that all the hard work has been done, these are simple. +%D \type{\ixbibauthoryearref} stores the data in the macros +%D \type{\currentbibauthor} and \type{\currentbibyear}. 
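+%D
+%D How these variants come out is controlled per alternative with
+%D \type{\setupcite}; the keys \type{left}, \type{right},
+%D \type{inbetween}, \type{pubsep} and \type{lastpubsep} are the ones
+%D consulted below. The values in this sketch are only an illustration,
+%D not the shipped defaults:
+%D
+%D \starttyping
+%D \setupcite
+%D   [authoryear]
+%D   [left={(},       % illustrative values only
+%D    right={)},
+%D    inbetween={ },
+%D    pubsep={, },
+%D    lastpubsep={ and }]
+%D \stoptyping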
+ +\def\ifbibinteractionelse% + {\iflocation + \edef\test{\bibalternative\c!interaction}% + \ifx\test\v!stop + \@EA\@EA\@EA\secondoftwoarguments + \else + \@EA\@EA\@EA\firstoftwoarguments + \fi + \else + \@EA\secondoftwoarguments + \fi + } + +\def\bibmaybeinteractive#1#2% + {\ifbibcitecompress #2\else + \ifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}\fi } + +\def\bibauthoryearref[#1]% + {\ixbibauthoryear{#1}% + {\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween + \bibalternative\v!left{\currentbibyear}\bibalternative\v!right}} + {\bibalternative\c!pubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween + \bibalternative\v!left {\currentbibyear}\bibalternative\v!right}} + {\bibalternative\c!lastpubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween + \bibalternative\v!left {\currentbibyear}\bibalternative\v!right}}} + +\def\bibauthoryearsref[#1]% + {\bibalternative\v!left + \ixbibauthoryear{#1} + {\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}} + {\bibalternative\c!pubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}} + {\bibalternative\c!lastpubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}}% + \bibalternative\v!right} + +\def\bibauthorref[#1]% + {\bibalternative\v!left + \ixbibauthoryear{#1}% + {\bibmaybeinteractive{#1}{{\currentbibauthor}}} + {\bibalternative\c!pubsep \bibmaybeinteractive{#1}{{\currentbibauthor}}} + {\bibalternative\c!lastpubsep\bibmaybeinteractive{#1}{{\currentbibauthor}}}% + \bibalternative\v!right} + +\def\bibyearref[#1]% + {\bibalternative\v!left + \ixbibauthoryear{#1}% + {\bibmaybeinteractive{#1}{{\currentbibyear}}} + {\bibalternative\c!pubsep \bibmaybeinteractive{#1}{{\currentbibyear}}} + {\bibalternative\c!lastpubsep\bibmaybeinteractive{#1}{{\currentbibyear}}}% + \bibalternative\v!right} + +%D ML problems: + +%D \macros{bibshortref,bibkeyref,bibpageref,bibtyperef,bibserialref} +%D +%D There is hardly any point in trying to compress these. The only +%D thing that needs to be done is making sure that +%D the separations are inserted correctly. And that is +%D what \type{\refsep} does. 
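+%D
+%D These variants are reached through the first argument of
+%D \type{\cite}. A few sketched calls (the key \type{knuth84} is again
+%D fictitious) that end up in the macros defined below:
+%D
+%D \starttyping
+%D \cite[num][knuth84]    % number of the entry in the publication list
+%D \cite[short][knuth84]  % the short key of the entry
+%D \cite[page][knuth84]   % page where the entry is typeset
+%D \cite[doi][knuth84]    % interactive doi field
+%D \cite[url][knuth84]    % interactive url field
+%D \stoptyping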
+ +\newif\iffirstref + +\def\refsep{\iffirstref\firstreffalse\else\bibalternative\c!pubsep\fi} + +\def\bibshortref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibshortref + \bibalternative\v!right} + +\def\dobibshortref#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1}{\gotobiblink{\getvalue{pbds-#1}}[#1]} + {\unknownreference{#1}}} + + +\def\bibserialref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibserialref + \bibalternative\v!right} + +\def\dobibserialref#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1}{\gotobiblink{\getvalue{pbdn-#1}}[#1]} + {\unknownreference{#1}}} + +\def\bibkeyref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibkeyref + \bibalternative\v!right} + +\def\dobibkeyref#1% + {\addthisref{#1}\refsep\gotobiblink{#1}[#1]} + +\def\gotoDOI#1#2% + {\ifbibinteractionelse + {\useURL[bibfooDoi#1][#2]% + \useURL[bibfoo#1][http://dx.doi.org/#2]% + \goto{\url[bibfooDoi#1]}[url(bibfoo#1)]} + {\hyphenatedurl{#2}}} + +\def\bibdoiref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibdoiref + \bibalternative\v!right} + +\def\dobibdoiref#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1}{\expanded{\gotoDOI{#1}{\getvalue{pbdo-#1}}}} + {\unknownreference{#1}}} + +\def\biburlref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobiburlref + \bibalternative\v!right} + +\def\gotoURL#1#2% + {\ifbibinteractionelse + {\useURL[bibfoo#1][#2]\goto{\url[bibfoo#1]}[url(bibfoo#1)]} + {\hyphenatedurl{#2}}} + +\def\dobiburlref#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1}{\expanded{\gotoURL{#1}{\getvalue{pbdu-#1}}}} + {\unknownreference{#1}}} + +\def\bibtyperef[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibtyperef + \bibalternative\v!right} + +\def\dobibtyperef#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1}{\gotobiblink{\getvalue{pbdt-#1}}[#1]} + {\unknownreference{#1}}} + +\def\bibpageref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibpageref + \bibalternative\v!right} + +\def\dobibpageref#1% + {\addthisref{#1}\refsep + \ifbibinteractionelse{\atbiblink[#1]}{{\referencingfalse\at[#1]}}} + +\def\bibdataref[#1]% + {\bibalternative\v!left + \firstreftrue\processcommalist[#1]\dobibdata + \bibalternative\v!right} + +\def\dobibdata#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1}{\dotypesetapublication{#1}} + {\unknownreference{#1}}} + +\let\bibnoneref\nocite + +%D \macros{bibnumref} +%D +%D It makes sense to try and compress the argument list of +%D \type{\bibnumref}. There are two things involved: the actual +%D compression, and a sort routine. The idea is to store the +%D found values in a new commalist called \type{\therefs}. + +%D But that is not too straight-forward, because \type{\in} is +%D not expandable, +%D so that the macro \type{\expandrefs} is needed. 
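+%D
+%D Whether this compression happens is a per-alternative setting. A
+%D sketch (with made-up keys) of what the code below caters for:
+%D
+%D \starttyping
+%D \setupcite[num][compress=yes] % [1,3,4,5] may be condensed to a range
+%D \setupcite[num][compress=no]  % every key keeps its own number
+%D \cite[num][alpha,beta,gamma]  % alpha, beta, gamma are made-up keys
+%D \stoptyping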
+ +\def\expandrefs#1% + {\bgroup + \preparebibrefprefix + \preparebibreflist{#1}% + \global\bibreffoundfalse + \def\setuplink##1% + {\ifbibreffound\else + \doifreferencefoundelse + {##1} + {\global\bibreffoundtrue + \@EA\doglobal\@EA\addtocommalist\@EA{\reftypet}\therefs }% + {}\fi}% + \processcommacommand[\bibreflist]\setuplink + \ifbibreffound \else \showmessage\m!publications{5}{#1 unknown}% + \doglobal\addtocommalist{0}\therefs\fi + \egroup } + +%D But at least the actual sorting code is simple (note that sorting +%D a list with exactly one entry fails to return anything, which +%D is why the \type{\ifx} is needed). + +\def\bibnumref[#1]% + {\bibalternative\v!left + \penalty\!!tenthousand + \processcommalist[#1]\addthisref + \firstreftrue + \ifbibcitecompress + \glet\therefs\empty + \processcommalist[#1]\expandrefs + \sortcommacommand[\therefs]\donumericcompare + \ifx\empty\sortedcommalist\else + \let\therefs\sortedcommalist + \fi + \compresscommacommandnrs[\therefs]% + \processcommacommand[\compressedlist]\verysimplebibnumref + \else + \processcommalist[#1]\dosimplebibnumref + \fi + \bibalternative\v!right} + +\def\dosimplebibnumref #1% + {\refsep\ifbibinteractionelse + {\inbiblink[#1]}{{\referencingfalse\inbiblink[#1]}}} + +\def\verysimplebibnumref#1{\doverysimplebibnumref#1} + +\def\doverysimplebibnumref#1#2% + {\refsep + \ifcase#1\relax \unknownreference{#1}\else + \def\tempa{#2}\ifx\empty\tempa#1\else#1\bibalternative\c!inbetween#2\fi + \fi} + +%D By request from Sanjoy. This makes it easier to implement +%D \type{\citeasnoun}. + +\def\bibauthornumref[#1]% + {\getcommalistsize[#1]% + \global\bibitemcounter\commalistsize + \firstreftrue + \processcommalist[#1]\dobibauthornumref } + +\def\dobibauthornumref#1% + {\addthisref{#1}\refsep + \doifbibreferencefoundelse{#1} + {\getvalue{pbda-#1}% + \bibalternative\c!inbetween + \bibalternative\v!left + \ifbibinteractionelse{\inbiblink[#1]} + {{\referencingfalse\inbiblink[#1]}}% + \bibalternative\v!right} + {\unknownreference{#1}}} + +%D And some defaults are loaded from bibl-apa: + +\setuppublications + [\v!month\v!conversion=, + \c!alternative=apa] + +\appendtoks + \preloadbiblist +\to \everystarttext + +\protect \endinput diff --git a/Master/texmf-dist/tex/context/base/bibl-tra.mkiv b/Master/texmf-dist/tex/context/base/bibl-tra.mkiv new file mode 100644 index 00000000000..519af0c6f77 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/bibl-tra.mkiv @@ -0,0 +1,1552 @@ +%D \module +%D [ file=bibl-tra, +%D version=2009.08.22, +%D title=\CONTEXT\ Publication Module, +%D subtitle=Publications, +%D author=Taco Hoekwater, +%D date=\currentdate, +%D copyright=Public Domain] +%C +%C Donated to the public domain. + +%D This module has been adapted to \MKIV\ by Hans Hagen so if things go wrong, +%D he is to blame. The changes concern references and lists but teh rendering +%D itself is unchanged. Future versions might provide variants as we have plans +%D for an upgrade. +%D +%D We use a still somewhat experimental extension to the list +%D mechanism. Eventually the bibtex module will use the bibl loader +%D and access the data by means of lpath expressions. In that case we +%D don't need to process the bibliography but still need to track +%D usage as done here. +%D +%D A bit ongoing: make more local macros prefixed with bib, i.e. the bib +%D namespace is reserved. 
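+%D
+%D As a quick orientation, a minimal and purely hypothetical document
+%D using this module might look as follows; the database name
+%D \type{mybib} and the key \type{knuth84} are made up:
+%D
+%D \starttyping
+%D \setupbibtex      [database=mybib,sort=author] % mybib.bib is made up
+%D \setuppublications[alternative=apa]
+%D
+%D \starttext
+%D   As argued in \cite[knuth84], ...
+%D   \completepublications
+%D \stoptext
+%D \stoptyping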
+ +\writestatus{loading}{ConTeXt Bibliography Support / BibTeX} + +\definefilesynonym[bib][obsolete] + +\registerctxluafile{bibl-tra}{1.001} + +%D The original was developed independantly by Taco Hoekwater while still working for Kluwer +%D Academic publishers (it still used the dutch interface then). Development continued after +%D he left Kluwer, and in Januari 2005, the then already internationalized file was merged +%D with the core distribution by Hans Hagen. The current version is once again by Taco. +%D +%D More documentation and additional resources can be found on the contextgarden: +%D \hyphenatedurl{http://wiki.contextgarden.net//Bibliography}. + +%D \subject{DONE (dd/mm/yyyy)} +%D +%D \startitemize +%D \item add author definition (and associated system variable) (26/05/2005) +%D \item add finalnamesep support for Oxford comma (17/09/2005) +%D \item add \type{\insert...} for: doi, eprint, howpublished (19/09/2005) +%D \item allow a defaulted \type{\setupcite} (19/11/2005) +%D \item renamed citation type 'number' to 'serial' (19/11/2005) +%D \item better definition of \type{\inverted...author} (19/11/2005) +%D \item don't reset [numbercommand] in \type {\setuppublication} by default (20/11/2005) +%D \item don't disable other \type {\setuppublication} keys if alternative is present (20/11/2005) +%D \item drop \type{\sanitizeaccents} (20/11/2005) +%D \item added \type{\nocite} and \type{\cite[none]} (21/11/2005) +%D \item added headtext for it (23/11/2005) +%D \item make \type{\cite[url]} and \type{\cite[doi]} interactive (23/11/2005) +%D \item make right-aligned labels in the list work even when autohang=no +%D \item use 'et al.' instead of 'et.al.'. Pointed out by Peter M\"unster (30/12/2005) +%D \item added headtext for cz (31/12/2005) +%D \item Keep whitespace after \type{\cite} with single argument (31/12/2005) +%D \item Fix broken \type{\cite{}} support (31/12/2005) +%D \item Use \type{\readfile} inside \type{\usepublications} instead of \type{\readsysfile} (12/01/2006) +%D \item Use \type{\currentbibyear} and \type{\currentbibauthor} instead of \type{\YR} and \type{\AU} (05/02/2006) +%D \item Fix compressed version of authoryear style (05/02/2006) +%D \item Rename the clashing data fields \type{\url} and \type{\type} to \type{\biburl} and \type{\bibtype} (05/02/2006) +%D \item Added two french bibl files from Renaud Aubin (06/02/2006) +%D \item Five new bib class and eight extra bib fields, for IEEEtran (07/02/2006) +%D \item French keyword translation, provided by Renaud (08/02/2006) +%D \item fix underscores in undefined keys (22/02/2006) +%D \item Destroy interactivity in labels of the publication list (13/03/2006) +%D \item fix multi-cite list compression (11/4/2006) +%D \item fix \type{\getcitedata} (11/4/2006) +%D \item magic for chapter bibs (18-25/4/2006) +%D \item language setting (25/4/2006) +%D \item use \type{\hyphenatedurl} for \type{\inserturl} (25/4/2006) +%D \item Add \type{\docitation} to \type{\nocite}(26/4/2006) +%D \item patents can have numbers, added to bst files (26/4/2006) +%D \item \type{\docitation} needs a \type{\iftrialtypesetting} (27/4/2006) +%D \item \type{\filllocalpublist}'s loop is bound by definedness, not resolvedness (27/4/2006) +%D \item \type{\setuppublications[monthconversion=]} added (15/5/2006) +%D \item use \type{\undefinedreference} instead of bare question marks (15/5/2006) +%D \item add grouping around \type{\placepublications} commands (16/5/2006) +%D \item fix a bug in \type{\cite{}} (17/5/2006) +%D \item support 
\type{\cite[authornum]} (18/5/2006) +%D \item make \type{\cite} unexpandable (20/6/2006) +%D \item allow hyperlinks in author\&year combo's +%D (cite list compression has to be off) (20/6/2006) +%D \item fix duplicate labels for per-chapter style (20/6/2006) +%D \item allow \type{\setupcite[interaction=(start|stop)]} +%D \item fix the item number in the publication list with 'numbering=yes' (22/6/2006) +%D \item make the default criterium for \type{\placepublications} be \type{previous} (23/6/2006) +%D \item fix \type{\normalauthor} and \type{\normalshortauthor} spacing (29/6/2006) +%D \item do not typeset empty arguments to \type{\typesetapublication} (29/6/2006) +%D \item add \type{symbol=none} to \type{\setuplist} in unnumbered +%D mode to prevent typesetting of bare numbers (29/6/2006) +%D \item remove two incorrect spaces from bibl-num.tex (1/7/2006) +%D \item reset font styles within \type{\cite}, so that font switches +%D in \type{left} stay in effect (12/7/2006) +%D \item guard added against loading bbl files multiple times (13/7/2006) +%D \item fix \type{\cite[num]} with compression is on. (14/7/2006) +%D \item test \type{\iflocation} before deciding to use the +%D interactive version of cite (18/7/2006) +%D \item support \type{\setupcite[authoretallimit=1]} (18/7/2006) +%D \item support use of \type{\cite} within titles and captions by +%D saveguarding the list item extraction and reference placement +%D code (19/7/2006) +%D \item support \type{\setuppublicationlist[title=\chapter]} (4/8/2006) +%D \item use the expansion of \type{\headtext{pubs}} (4/8/2006) +%D \item hook added for repeated authors in publication list +%D \type{\setuppublicationlist[artauthorcommand=\mythreeargscommand]} +%D (4/8/2006) +%D \item make the bracketed arguments of \type{\artauthor}, \type{\author} +%D and \type{\editor} (bbl commands) optional (4/8/2006) +%D \item the constants \type{sorttype}, \type{compress} and +%D \type{autohang} have moved to the core (8/8/2006) +%D \item bibtex is now registered as a program to be run by texexec (8/8/2006) +%D \item fix a bug in \type{\setupcite[authoretallimit=1]} (9/8/2006) +%D \item fix a bug inside citations that prevented lastpubsep from ever being +%D used due to a volatile \type{\commalistsize} (25/8/2006). +%D \item added the possibility of \type{\placepublications[option=continue]} +%D (6/9/2006) +%D \item Mojca translated Master's Thesis to Masterarbeit (bibl-apa-de.tex) +%D (12/9/2006) +%D \item Added \type{\setuppublicationlist[maybeyear=off]} by request from +%D Thomas Schmitz (15/9/2006) +%D \item Removed some spurious spaces pointed out by willi egger (19/9/2006) +%D \item Add configuration of bibtex executable name (4/11/2006) +%D \item Fix numbering=short and numbering=bib (spotted by Matthias W\"achter) (4/11/2006) +%D \item third attempt to get a correct release (5/11/2006) +%D \item fix a few missing dots in bibl-num.tex (7/12/2006) +%D \item Patch for DOI's by Tobias Burnus (17/4/2007) +%D \item Patch for \type{\insertbiburl} and \type{\insertdoi} for Tobias Burnus (18/4/2007) +%D \item Added a missing \type{\relax} in \type{\dospecialbibinsert}, +%D that made the space before the {\it et al.} text disappear. (18/4/2007) +%D \item Attempt to fix percent signs in bbl files. As a side-effect, +%D this prohibits comments in \tex{startpublication} blocks! 
(17/4/2008) +%D \item Patch from Matthias W\"achter that allows arbitrary .bst +%D files to be used with \tex{setupbibtex} (25/9/2008) +%D \item Extended for the new multilingual setups for the Oct 2008 current of ConTeXt (23/10/2008) +%D \item Multilingual setups needed another fix (27/10/2008) +%D \item Two fixes for bibl-apa by Michael Green (27/10/2008) +%D \item Catalan translation of 'References' (10/11/2008) +%D \item 'chapter' -> 'chapitre' in bibl-apa-fr (27/11/2008) +%D \item Run bibtex via os.execute in mkiv modee (01/12/2008) +%D \item Small correction in bibl-apa's placement of volume +%D information in articles (05/01/2009) +%D \item Handle multi-author (more than two) cases in \type{\cite} +%D (02/03/2009) +%D \item Suppress a syntax error in \type{cont-xp} mode. The output is +%D probably not right, though (02/03/2009) +%D \item Added a \tex{loadmarkfile} at the end, and two new files +%D from Hans. The \type{t-bib.mkiv} is needed to make the module +%D work with the new structure code (17/04/2009) +%D \item Added a patch to \type{t-bib.mkiv} from Hans to make the +%D cross referencing between multiple citations an +%D bibliographies work (27/04/2009) +%D \item Remove a superfluous \type{\unprotect} in t-bib.mkiv (11/05/2009). +%D \item Patch of incollection in bibl-ams.tex from Xan (08/06/2009). +%D \item Patch of unpublished in bibl-ams.tex from Xan (22/07/2009). +%D \item Modified \type{\bibdogetupsometextprefix} so it works for undefined +%D language labels, from Hans (13/08/2009). +%D \item Adapt referencing and list insertion to \MKIV. Update some code +%D to the latest \CONTEXT. Change some names in order to avoid conflicts +%D with existing core names (like \type {\insertpages}). +%D \item All constants, variables, message etc.\ are now in the core. +%D \item Added key: \type {method} (when \type {global}, previous shown entries are +%D not shown again, when \type {local} they are repeated). +%D \stopitemize +%D +%D \subject{WISHLIST} +%D +%D \startitemize +%D \item link back from publication list to citation +%D \item export \type {\citation{}} +%D \item support mlbibtex +%D \item don't load the whole lot, but filter entries instead +%D \item 9 vs 10, 19 vs 20 ... prevent extra runs when only subtle changes in wd of reference +%D \stopitemize + +\unprotect + +\def\biblistname{pubs} % for compatibility + +\definelist + [pubs] + +\setuplist + [pubs] + [\c!state=\v!start, + \c!width=] + +\installstructurelistprocessor{pubs:userdata}% + {\ctxlua{bibtex.hacks.add(structure.lists.uservalue("\currentlist",\currentlistindex,"bibref"),\currentlistindex)}} + +\newcount\bibtexblock \bibtexblock\plusone + +%D \macros{bibdoif,bibdoifnot,bibdoifelse} +%D +%D Here are a few small helpers that are used a lot in all the typesetting commands +%D (\type{\bibinsert...}) we will encounter later. + +\long\def\bibdoifelse#1% + {\@EA\def\@EA\!!stringa\@EA{#1}% + \ifx\!!stringa\empty + \expandafter\secondoftwoarguments + \else + \expandafter\firstoftwoarguments + \fi} + +\long\def\bibdoifnot#1% + {\@EA\def\@EA\!!stringa\@EA{#1}% + \ifx\!!stringa\empty + \expandafter\firstofoneargument + \else + \expandafter\gobbleoneargument + \fi} + +\long\def\bibdoif#1% + {\@EA\def\@EA\!!stringa\@EA{#1}% + \ifx\!!stringa\empty + \expandafter\gobbleoneargument + \else + \expandafter\firstofoneargument + \fi} + +%D Unfortunately, \BIBTEX\ is not the best configurable program +%D around. The names of the commands it parses as well as the \type{.aux} +%D extension to the file name are both hardwired. 
+%D +%D This means \CONTEXT\ has to write a \LATEX-style auxiliary file, yuk! +%D The good news is that it can be rather short. We'll just ask +%D \BIBTEX\ to output the entire database(s) into the \type{bbl} file. +%D +%D The \type{\bibstyle} command controls how the \type{bbl} file will +%D be sorted. The possibilities are: +%D +%D \startitemize[packed] +%D \item by author (+year, title): cont-au.bst +%D \item by title (+author, year): cont-ti.bst +%D \item by short key as in abbrev.bst: cont-ab.bst +%D \item not sorted at all: cont-no.bst +%D \stopitemize + +\newtoks\everysetupbibtex + +\unexpanded\def\setupbibtex + {\dosingleempty\dosetupbibtex} + +\def\dosetupbibtex[#1]% + {\let\@@pbdatabase\empty + \getparameters[\??pb][#1]% + \the\everysetupbibtex} + +\def\installbibtexsorter#1#2% + {\setvalue{\??pb:\c!sort:#1}{#2}} + +\installbibtexsorter\v!no {no} +\installbibtexsorter\v!author {au} +\installbibtexsorter\v!title {ti} +\installbibtexsorter\v!short {ab} +\installbibtexsorter\empty {no} +\installbibtexsorter\s!default{no} + +\def\thebibtexsorter{\executeifdefined{\??pb:\c!sort:\@@pbsort}\@@pbsort} + +\appendtoks + \ifx\@@pbdatabase\empty\else + \doifmode{*\v!first}{\ctxlua{bibtex.hacks.process { style="\thebibtexsorter", database="\@@pbdatabase" }}}% + \fi +\to \everysetupbibtex + +\setupbibtex + [\c!sorttype=\v!cite, + \c!sort=no] + +%D \macros{iftypesetall,ifbibcitecompress} +%D +%D The module needs some new \type{\if} statements. + +\newtoks\everysetuppublications + +\unexpanded\def\setuppublications + {\dosingleargument\dosetuppublications} + +\def\dosetuppublications[#1]% + {\getparameters[\??pb][\c!alternative=,#1]% + \doifsomething\@@pbalternative + {\readsysfile{bibl-\@@pbalternative.tex} + {\showmessage\m!publications{6}{bibl-\@@pbalternative}} + {\showmessage\m!publications{1}{bibl-\@@pbalternative}}% + \let\@@pbalternative\empty}% + \let\setuppublicationlayout\normalsetuppublicationlayout % overloaded in bibl-num ... vadjust needs to be done with option + \getparameters[\??pb][#1]% as bibl-* can have set things back + \the\everysetuppublications + \ignorespaces} + +%D We can omit already shown references (\v!global) or use fresh +%D lists each time (\v!local). + +\chardef\bibtexoncemode\plusone % 0=disable, 1=local, 2=global + +\appendtoks + \doifelse\@@pbmethod\v!local + {\chardef\bibtexoncemode\plusone}% + {\chardef\bibtexoncemode\plustwo}% +\to \everysetuppublications + +%D Cite lists are compressed, if possible. This is set later on. 
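+%D
+%D Before moving on, a sketch of how the \type{method} key handled
+%D above is meant to be used:
+%D
+%D \starttyping
+%D \setuppublications[method=global] % already shown entries are skipped
+%D \setuppublications[method=local]  % entries are repeated in every list
+%D \stoptyping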
+ +\newif\ifbibcitecompress\bibcitecompresstrue + +% \appendtoks +% \processaction +% [\@@pbnumbering] +% [ \v!yes=>\let\@@pbinumbercommand\firstofoneargument, +% \v!no=>\let\@@pbinumbercommand\gobbleoneargument, +% \v!short=>\def\@@pbinumbercommand##1{\bibgetvars\currentpublicationkey}, +% \v!bib=>\def\@@pbinumbercommand##1{\bibgetvarn\currentpublicationkey}, +% \s!unknown=>\let\@@pbinumbercommand\firstofoneargument]% +% \to \everysetuppublications + +\def\@@pbinumbercommand{\executeifdefined{\??pb:\c!numbercommand:\@@pbnumbering}\firstofoneargument} + +\letvalue{\??pb:\c!numbercommand:\v!yes }\firstofoneargument +\letvalue{\??pb:\c!numbercommand:\v!no }\gobbleoneargument +\setvalue{\??pb:\c!numbercommand:\v!short}#1{\bibgetvars\currentpublicationkey} +\setvalue{\??pb:\c!numbercommand:\v!bib }#1{\bibgetvarn\currentpublicationkey} + +% to be tested +% +% \setvalue{\??pb:\c!numbercommand:\v!short}{\bibgetvars\currentpublicationkey\firstofoneargument} +% \setvalue{\??pb:\c!numbercommand:\v!bib }{\bibgetvarn\currentpublicationkey\firstofoneargument} + +\appendtoks + \processaction + [\@@pbrefcommand] + [\s!default=>\edef\@@citedefault{\@@pbrefcommand}, + \s!unknown=>\edef\@@citedefault{\@@pbrefcommand}]% +\to \everysetuppublications + +\def\bibleftnumber#1{#1\hfill~} + +%D \macros{usepublications} +%D +%D After discussing it with Thomas Schmitz it became clear that using external +%D references makes no sense as one needs to refer to it in special ways and +%D because similar numbers can be confusing. So, for the moment this is not +%D supported in \MKIV. (So no: see reference [3-5,9] in "some other document") + +\def\usepublications[#1]% + {\processcommalist[#1]\dousepublications} + +% \def\dousepublications#1% +% {\doonlyonce{#1.\f!bibextension} +% {\readfile{#1.\f!bibextension} +% {\showmessage\m!publications{4}{#1.\f!bibextension}} +% {\showmessage\m!publications{2}{#1.\f!bibextension}}}} + +\def\dousepublications#1% + {\doonlyonce{#1.\f!bibextension}{\dodousepublications{#1}}} + +\def\dodousepublications#1% + {\let\@@savedpar\par + \let\par\ignorespaces + \ifhmode\kern\zeropoint\fi + \readfile{#1.\f!bibextension} + {\showmessage\m!publications{4}{#1.\f!bibextension}} + {\showmessage\m!publications{2}{#1.\f!bibextension}}% + \ifhmode\removeunwantedspaces\fi + \let\par\@@savedpar} + +%D \macros{setuppublicationlist} +%D +%D This will be the first command in (\BIBTEX-generated) \type{bbl} +%D files. `samplesize' is a sample value (in case of \BIBTEX-generated +%D files, this will be the longest `short' key). `totalnumber' +%D is the total number of entries that will follow in this +%D file. +%D +%D Both values are only needed for the label calculation +%D if `autohang' is `true', so by default the command is +%D not even needed, and therefore I saw no need to give +%D it it's own system variable and it just re-uses \type{pb}. 
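+%D
+%D So the first command of a generated \type{bbl} file may look like
+%D the following (the values are of course fabricated):
+%D
+%D \starttyping
+%D \setuppublicationlist
+%D   [samplesize={AA99}, % sample values only
+%D    totalnumber=123]
+%D \stoptyping
+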
+ +\def\publicationlistparameter#1{\csname\??pv:l:#1\endcsname} + +\unexpanded\def\setuppublicationlist + {\dosingleempty\dosetuppublicationlist} + +\def\dosetuppublicationlist[#1]% + {\getparameters[\??pv:l:][#1]% + \setuplist[pubs][\c!samplesize={AA99},\c!alternative=a,\c!interaction=,\c!pagenumber=\v!no,#1,\c!command=]} + +\unexpanded\def\setuppublicationlayout[#1]#2% + {\setvalue{\??pv:l:#1}{#2}} + +\let\normalsetuppublicationlayout\setuppublicationlayout + +\setuppublicationlist[\c!title=,\c!command=\dospecialbibinsert,\c!maybeyear=\v!on] + +%D \macros{bibalternative} +%D +%D A nice little shorthand that will be used so we don't have to +%D key in the weird \type{\@@pv} parameter names all the time. + +\def\bibalternative#1% + {\csname\??pv\@@currentalternative#1\endcsname} + +%D \macros{simplebibdef,bibcommandlist} +%D +%D \type{\simplebibdef} defines \type{bib@#1}, which in turn will +%D use one argument that is stored in \type{@@pb@#1}. +%D +%D \type{\simplebibdef} also defines \type{bibinsert#1}, which can be +%D used in the argument of \type{\setuppublicationlayout} to fetch +%D one of the \type{@@pb@} data entries. \type{bibinsert#1} then has +%D three arguments: \type{#1} are commands to be executed before the +%D data, \type{#2} are commands to be executed after the data, and +%D \type{#3} are commands to be executed if the data is not found. +%D +%D \type{\bibcommandlist} is the list of commands that is affected +%D by this approach. Later on, it will be used to do a series +%D of assignments from \type{#1} to \type{bib@#1}: e.g +%D \type{\title} becomes \type{\bib@title} when used within +%D a publication. + +\newtoks\initializebibdefinitions % we need to prevent clashes + +\def\simplebibdef#1% hh: funny expansion ? + {\@EA\long\@EA\def\csname bib@#1\endcsname##1% + {\setvalue{\??pb @#1}{##1}\ignorespaces}% + \expandafter \appendtoks + \expandafter\let\csname insert#1\expandafter\endcsname\csname bibinsert#1\endcsname + \to \initializebibdefinitions + \@EA\unexpanded\@EA\def\csname bibinsert#1\endcsname##1##2##3% + {\@EA\bibdoifelse\@EA{\csname\??pb @#1\endcsname}{##1\csname\??pb @#1\endcsname##2}{##3}}} + +\def\bibcommandlist + {abstract, annotate, arttitle, assignee, bibnumber, bibtype, biburl, chapter, city, + comment, country, day, dayfiled, doi, edition, eprint, howpublished, isbn, issn, + issue, journal, keyword, keywords, lastchecked, month, monthfiled, names, nationality, + note, notes, organization, pages, pubname, pubyear, revision, series, size, thekey, + title, volume, yearfiled} + +\processcommacommand[\bibcommandlist]\simplebibdef + +% \def\bibinsertdoi#1#2#3% +% {\begingroup +% \bibdoifelse{\@@pb@doi}% +% {\edef\ascii{\detokenize\expandafter{\@@pb@doi}}% probably not ok, to less expansion +% #1\expanded{\bibgotoDOI{\@@pb@thekey}{\ascii}}#2}{#3}% +% \endgroup} +% +% \def\bibinsertbiburl#1#2#3% +% {\begingroup +% \bibdoifelse{\@@pb@biburl}% +% {\edef\ascii{\detokenize\expandafter{\@@pb@biburl}}% probably not ok, to less expansion +% #1\expanded{\bibgotoURL{\@@pb@thekey}{\ascii}}#2}{#3}% +% \endgroup} + +\def\bibinsertdoi#1#2#3% let's see how this fails + {\bibdoifelse{\@@pb@doi}{#1\expanded{\bibgotoDOI{\@@pb@thekey}{\@@pb@doi}}#2}{#3}} + +\def\bibinsertbiburl#1#2#3% let's see how this fails + {\bibdoifelse{\@@pb@biburl}{#1\expanded{\bibgotoURL{\@@pb@thekey}{\@@pb@biburl}}#2}{#3}} + +\def\bibinsertmonth#1#2#3% + {\bibdoifelse\@@pb@month + {#1\doifnumberelse\@@pb@month + {\doifconversiondefinedelse\@@pbmonthconversion + 
{\convertnumber\@@pbmonthconversion\@@pb@month}{\@@pb@month}}% + {\@@pb@month}#2}% + {#3}} + +\appendtoks + \let\inserturl \bibinsertbiburl % for backward compat. + \let\inserttype\bibinsertbibtype % for backward compat. +\to\initializebibdefinitions + +\def\newbibfield[#1]% + {\simplebibdef{#1}% + \edef\bibcommandlist{\bibcommandlist,#1}} + +%D \macros{complexbibdef,specialbibinsert} +%D +%D The commands \type{\artauthor}, \type{\author} and +%D \type{\editor} are more complex than the other commands. +%D Their argument lists have this form: +%D +%D \type{\author[junior]{firstnames}[inits]{von}{surname}} +%D +%D (bracketed stuff is optional) +%D +%D And not only that, but there also might be more than one of each of +%D these commands. This is why a special command is needed to insert +%D them, as well as one extra counter for each command. + +% todo: instead of \getvalue{bla@num} in specs we should do +% \bibentrynum{bla} so that we can create a better namespace + +%D All of these \type{\@EA}'s and \type{\csnames} make this code +%D look far more complex than it really is. For example, the argument +%D \type{author} defines the macro \type{\bib@author} to do two +%D things: increment the counter \type{\author@num} (let's say to 2) +%D and next store it's arguments in the macro \type{\@@pb@author2}. +%D And it defines \type{\bibinsertauthors} to expand into +%D \starttyping +%D \specialbibinsert{author}{\author@num}{}{}{} +%D \stoptyping + +% \def\docomplexbibdef#1% +% {\def\currentype{#1}% +% \dosingleempty\dodocomplexbibdef} + +% \def\dodocomplexbibdef[#1]#2% +% {\def\firstarg{#1}\def\secondarg{#2}% +% \dosingleempty\dododocomplexbibdef} + +% \def\dododocomplexbibdef[#1]#2#3% +% {\@EA\increment\csname\currentype @num\endcsname +% \setevalue{\??pb @\currentype\csname \currentype @num\endcsname}% +% {{\secondarg}{#2}{#3}{#1}{\firstarg}}\ignorespaces} + +\def\docomplexbibdef#1% + {\dodoubleempty\dodocomplexbibdef[#1]} + +\def\dodocomplexbibdef[#1][#2]#3% + {\doquadrupleempty\dododocomplexbibdef[#1][#2][#3]} + +\def\dododocomplexbibdef[#1][#2][#3][#4]#5#6% + {\@EA\increment\csname#1@num\endcsname % todo: bib in name + \setevalue{\??pb @#1\csname#1@num\endcsname}{{#3}{#5}{#6}{#4}{#2}}\ignorespaces} + +\def\complexbibdef#1% + {\@EA\newcounter\csname #1@num\endcsname + \@EA\def\csname bib@#1\endcsname{\docomplexbibdef{#1}}% + \expandafter \appendtoks + \expandafter\let\csname insert#1s\expandafter\endcsname\csname bibinsert#1s\endcsname + \to \initializebibdefinitions + \@EA\def\csname bibinsert#1s\endcsname##1##2##3{\specialbibinsert{#1}{\csname #1@num\endcsname}{##1}{\unskip ##2}{##3}}} + +\processcommalist[author,artauthor,editor]\complexbibdef + +%D Another level of indirection is needed to control the +%D typesetting of all of these arguments. + +\newcount\etallimitcounter +\newcount\etaldisplaycounter +\newcount\todocounter + +\def\specialbibinsert#1#2#3#4#5% + {\bgroup + \ifnum#2>\zerocount + \etallimitcounter =0\bibalternative{#1etallimit}\relax + \etaldisplaycounter=0\bibalternative{#1etaldisplay}\relax + \ifnum #2>\etallimitcounter + \todocounter\etaldisplaycounter + % just in case ... 
+ \ifnum\todocounter>\etallimitcounter + \todocounter\etallimitcounter + \fi + \else + \todocounter#2\relax + \fi + \ifnum\todocounter>\zerocount + % find the current author list + \let\templist\empty + \dorecurse{#2} + {\scratchtoks\@EA\@EA\@EA{\csname\??pb @#1\recurselevel\endcsname}% + \edef\templist{\ifx\templist\empty\else\templist,\fi\the\scratchtoks}}% + #3\publicationlistparameter\c!command{#1}{\todocounter}{\templist}#4\relax + \else + #5% + \fi + \else + #5% + \fi + \egroup} + +%D This macro does the hard work of inserting a list of people in the +%D output, with proper regard of all the inbetween strings that can +%D arise depending on length of the list of people. + +%D \#1 = type +%D \#2 = number of items to be typeset +%D \#3 = commacommand containing authors + +% \def\dospecialbibinsert#1#2#3% +% {\getcommacommandsize[#3]% +% \scratchcounter\zerocount +% \def\processauthoritem##1% +% {\advance\scratchcounter\plusone +% \ifnum\numexpr\scratchcounter-\plusone\relax<#2\relax +% \publicationlistparameter{#1}##1% +% \ifnum\scratchcounter=#2\relax +% \ifnum\etallimitcounter<\commalistsize\relax +% \bibalternative{#1etaltext}% +% \fi +% \else\ifnum\numexpr\scratchcounter+\plusone\relax=#2\relax +% \ifnum\commalistsize>\plustwo +% \bibalternative\c!finalnamesep +% \else +% \bibalternative\c!lastnamesep +% \fi +% \else +% \bibalternative\c!namesep +% \fi\fi +% \fi}% +% \processcommacommand[#3]\processauthoritem} + +\def\doprocessauthoritem#1#2#3% + {\advance\scratchcounter\plusone + \ifnum\numexpr\scratchcounter-\plusone\relax<#2\relax + \publicationlistparameter{#1}#3% + \ifnum\scratchcounter=#2\relax + \ifnum\etallimitcounter<\commalistsize\relax + \bibalternative{#1etaltext}% + \fi + \else\ifnum\numexpr\scratchcounter+\plusone\relax=#2\relax + \ifnum\commalistsize>\plustwo + \bibalternative\c!finalnamesep + \else + \bibalternative\c!lastnamesep + \fi + \else + \bibalternative\c!namesep + \fi\fi + \fi} + +\def\dospecialbibinsert#1#2#3% + {\getcommacommandsize[#3]% + \scratchcounter\zerocount + \processcommacommand[#3]{\doprocessauthoritem{#1}{#2}}} + +%D \macros{invertedauthor,normalauthor,invertedshortauthor,normalshortauthor} +%D +%D Just some commands that can be used in \type{\setuppublicationparameters} +%D If you want to write an extension to the styles, you might +%D as well define some of these commands yourself. +%D +%D The argument list has been reordered here, and the meanings +%D are: +%D +%D \startlines +%D \type{#1} firstnames +%D \type{#2} von +%D \type{#3} surname +%D \type{#4} inits +%D \type{#5} junior +%D \stoplines + +\def\normalauthor#1#2#3#4#5% + {\bibdoif{#1}{#1\bibalternative\c!firstnamesep}% + \bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!surnamesep#5\unskip}} + +\def\normalshortauthor#1#2#3#4#5% + {\bibdoif{#4}{#4\bibalternative\c!firstnamesep}% + \bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!surnamesep#5\unskip}} + +\def\invertedauthor#1#2#3#4#5% + {\bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!juniorsep#5}% + \bibdoif{#1}{\bibalternative\c!surnamesep#1\unskip}} + +\def\invertedshortauthor#1#2#3#4#5% + {\bibdoif{#2}{#2\bibalternative\c!vonsep}% + #3% + \bibdoif{#5}{\bibalternative\c!juniorsep#5}% + \bibdoif{#4}{\bibalternative\c!surnamesep#4\unskip}} + +%D \macros{clearbibitem,clearbibitemtwo,bibitemdefs} +%D +%D These are used in \type{\typesetapublication} to do +%D initializations and cleanups. 
+ +\def\clearbibitem#1{\setvalue{\??pb @#1}{}}% + +% \def\clearbibitemtwo#1% +% {\letvalue{#1@num}\!!zerocount +% \doloop +% {\doifdefinedelse{\??pb @#1\recurselevel} +% {\letvalue{\??pb @#1\recurselevel}\empty} % why not undefined? +% {\exitloop}}} + +% \def\clearbibitemtwo#1% +% {\letvalue{#1@num}\!!zerocount +% \doloop +% {\ifcsname\??pb @#1\recurselevel\endcsname +% \expandafter\let\csname\??pb @#1\recurselevel\undefined +% \else +% \exitloop +% \fi}} + +\def\clearbibitemtwo#1% is this reset really needed? after all we reset the counter and we are local + {%\dofastrecurse\plusone{\csname#1@num\endcsname}\plusone{\expandafter\let\csname\??pb @#1\recurselevel\undefined}% + \letvalue{#1@num}\!!zerocount} + +\def\bibitemdefs#1% + {\@EA\let\csname#1\expandafter\endcsname\csname bib@#1\endcsname} + +\def\presetbibvariables % make a fast resetter (toks) + {\processcommacommand[\bibcommandlist,crossref]\clearbibitem + \processcommalist [artauthor,author,editor]\clearbibitemtwo + \processcommacommand[\bibcommandlist]\bibitemdefs + \processcommalist [artauthor,author,editor,crossref]\bibitemdefs} + +%D \macros{startpublication} +%D +%D We are coming to the end of this module, to the macros that +%D do typesetting and read the \type{bbl} file. + +\newcount\bibtexcounter + +%D Just a \type{\dosingleempty} is the most friendly +%D of doing this: there need not even be an argument +%D to \type{\startpublication}. Of course, then there +%D is no key either, and it had better be an +%D article (otherwise the layout will be all screwed up). +%D +%D Only specifying the key in the argument is also +%D legal. In storing this stuff into macros, some trickery with +%D token registers is needed to fix the expansion problems. Even so, +%D this appears to not always be 100\% safe, so people are +%D urgently advised to use \ETEX\ instead of traditional \TEX. +%D +%D In \ETEX, all expansion problems are conveniently solved by +%D the primitive \type{\protected}. To put that another way: +%D +%D It's not a bug in this module if it does not appear in \ETEX! +%D +%D Now prohibits comments, so % can be used for urls + +\unexpanded\def\startpublication + {\begingroup + \catcode`\%=\othercatcode + \dosingleempty\dostartpublication} + +\let\stoppublication\relax + +% this is rather memory hungry; some day i will rewrite this so that +% we use the database instead + +%D \macros{doifbibreferencefoundelse} +%D +%D Some macros to fetch the information provided by +%D \type{\startpublication}. + +% we can consider a faster variant in the bbl file; we can also consider +% storing the keys in lua (and then do more in lua) and use calls to +% fetch the variables + +% hm, we can store at the lua end ... 
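+%D
+%D To give an impression, a completely fabricated \type{bbl} entry
+%D follows the pattern below; the bracketed argument carries the key
+%D (\type{k}), entry type (\type{t}), author (\type{a}), year
+%D (\type{y}), short key (\type{s}) and number (\type{n}), as handled
+%D by the definition that follows:
+%D
+%D \starttyping
+%D % a fabricated entry, for illustration only:
+%D \startpublication[k=knuth84,t=book,a=Knuth,y=1984,s=Knu84,n=1]
+%D \author{Donald~E.}[D.E.]{}{Knuth}
+%D \pubyear{1984}
+%D \title{The \TeX book}
+%D \pubname{Addison-Wesley}
+%D \stoppublication
+%D \stoptyping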
+ +\long\def\dostartpublication[#1]#2\stoppublication + {\doifassignmentelse{#1}% + {\getparameters[\??pb][k=\s!unknown,t=article,n=,s=,a=,y=,o=,u=,#1]}% + {\getparameters[\??pb][k=#1,t=article,n=,s=,a=,y=,o=,u=]}% + \ctxlua{bibtex.hacks.register("\@@pbk")}% + \setxvalue{pbd:\@@pbk}##1{\noexpand\ifcase##1\noexpand\or + \@@pbk\noexpand\or + \@@pba\noexpand\or + \@@pby\noexpand\or + \@@pbs\noexpand\or + \@@pbn\noexpand\or + \@@pbt\noexpand\or + \@@pbo\noexpand\or + \@@pbu\noexpand\or + \normalunexpanded{#2}\noexpand\fi}% + \endgroup + \ignorespaces} + +\def\bibgetvark#1{\csname pbd:#1\endcsname\plusone } +\def\bibgetvara#1{\csname pbd:#1\endcsname\plustwo } +\def\bibgetvary#1{\csname pbd:#1\endcsname\plusthree} +\def\bibgetvars#1{\csname pbd:#1\endcsname\plusfour } +\def\bibgetvarn#1{\csname pbd:#1\endcsname\plusfive } +\def\bibgetvart#1{\csname pbd:#1\endcsname\plussix } +\def\bibgetvaro#1{\csname pbd:#1\endcsname\plusseven} +\def\bibgetvaru#1{\csname pbd:#1\endcsname\pluseight} +\def\bibgetvard#1{\csname pbd:#1\endcsname\plusnine } + +\def\doifbibreferencefoundelse#1% + {\preloadbiblist + \doifdefinedelse{pbd:#1} + \firstoftwoarguments + {\showmessage\m!publications{5}{#1 is unknown}\secondoftwoarguments}} + +%D \macros{bib@crossref} +%D +%D \type{\crossref} is used in database files to point to another +%D entry. Because of this special situation, it has to be defined +%D separately. Since this command will not be seen until at +%D \type{\placepublications}, it may force extra runs. The same is +%D true for \type{\cite} commands inside of publications. + +% used in bib self + +\def\bib@crossref#1% called via \csname \endcsname + {\setvalue{\??pb @crossref}{#1}\ignorespaces} + +\def\bibinsertcrossref#1#2#3% + {\bibdoifelse\@@pb@crossref{#1\cite[\@@pb@crossref]#2}{#3}} + +\appendtoks\let\insertcrossref\bibinsertcrossref\to\initializebibdefinitions + +%D The next macro is needed because the number command of the +%D publist sometimes needs to fetch something from the current +%D item (like the 'short' key). 
For this, the ID of the current +%D item is passed in the implict parameter \type{\currentpublicationkey} + +\def\doprocessbibtexentry#1{\typesetapublication{#1}} + +\def\typesetpubslist + {\dobeginoflist + % \the\initializebibdefinitions % COMMENTED + \edef\currentlist{pubs}% + \doif{\listparameter\c!criterium}\v!cite{\setuplist[pubs][\c!criterium=\v!here]}% + \ctxlua{bibtex.hacks.reset(\number\bibtexoncemode)}% + \placestructurelist{pubs}{\listparameter\c!criterium}{\listparameter\c!number}% + \ctxlua{bibtex.hacks.flush("\@@pbsorttype")}% + \doendoflist} + +\newif\ifinpublist + +\def\initializepubslist + {\def\currentlist{pubs}% + \edef\@@pbnumbering{\@@pbnumbering}% + \doifelse\@@pbautohang\v!yes + {\ifx\@@pbnumbering\v!short + \setbox\scratchbox\hbox{\@@pbnumbercommand{\listparameter\c!samplesize}}% + \else + \setbox\scratchbox\hbox{\@@pbnumbercommand{\ctxlua{tex.write(structure.lists.size())}}}% + \fi + \edef\publistnumberbox{\hbox to \the\wd\scratchbox}% + \expanded{\setuplist[pubs][\c!width=\the\wd\scratchbox,\c!distance=\zeropoint]}% + \ifx\@@pbnumbering\v!short + \def\@@pblimitednumber##1{\publistnumberbox{\@@pbnumbercommand{\bibgetvars\currentpublicationkey}}}% + \else\ifx\@@pbnumbering\v!bib + \def\@@pblimitednumber##1{\publistnumberbox{\@@pbnumbercommand{\bibgetvarn\currentpublicationkey}}}% + \else + \def\@@pblimitednumber##1{\publistnumberbox{\@@pbnumbercommand{##1}}}% + \fi\fi} + {\doifelsenothing{\listparameter\c!width} + {\let \publistnumberbox \hbox} + {\edef\publistnumberbox{\hbox to \listparameter\c!width}}% + \ifx\@@pbnumbering\v!short + \def\@@pblimitednumber##1{\publistnumberbox{\@@pbnumbercommand{\bibgetvars\currentpublicationkey}}}% + \else\ifx\@@pbnumbering\v!bib + \def\@@pblimitednumber##1{\publistnumberbox{\@@pbnumbercommand{\bibgetvarn\currentpublicationkey}}}% + \else + \def\@@pblimitednumber##1{\publistnumberbox{\@@pbnumbercommand{##1}}}% + \fi\fi}% + \ifx\@@pbnumbering\v!no + \setuplist[pubs][\c!numbercommand=,\c!symbol=\v!none,\c!textcommand=\outdented]% + \else + \setuplist[pubs][\c!numbercommand=\@@pblimitednumber]% + \fi + \doifelse{\publicationlistparameter\c!maybeyear}{\v!off}{\def\maybeyear##1{}}{\def\maybeyear##1{##1}}% + \forgetall} + +\def\outdented#1% move to supp-box ? + {\hskip-\hangindent#1\relax} + +%D The full list of publications + +\def\completepublications + {\dosingleempty\docompletepublications} + +\def\docompletepublications[#1]% + {\begingroup + \setuplist[pubs][\c!criterium=\v!previous,#1] + \doifelsenothing{\publicationlistparameter\c!title} + {\systemsuppliedchapter[pubs]{\headtext{pubs}}} + {\normalexpanded{\systemsuppliedchapter[pubs]{\publicationlistparameter\c!title}}}% + \dodoplacepublications} + +%D And the portion with the entries only. 
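+%D
+%D For instance (placement is up to the user; the criterium values
+%D shown are the ones handled by the surrounding code and by the list
+%D mechanism):
+%D
+%D \starttyping
+%D \placepublications[criterium=all]   % every entry in the database
+%D \placepublications[criterium=cite]  % only the entries actually cited
+%D \completepublications               % list preceded by a title
+%D \stoptyping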
+ +\def\bibrefprefix{\number\bibtexblock:} + +\unexpanded\def\placepublications + {\dosingleempty\doplacepublications} + +\def\doplacepublications[#1]% + {\begingroup + \setuplist[pubs][\c!criterium=\v!previous,#1]% + \dodoplacepublications} + +\def\dodoplacepublications + {\determinelistcharacteristics[pubs]% + \initializepubslist + \doifnot{\namedlistparameter{pubs}\c!option}\v!continue + {\global\bibtexcounter\zerocount}% + \inpublisttrue + \typesetpubslist + \inpublistfalse + \endgroup + \global\advance\bibtexblock\plusone} + +%D \subsubject{What's in a publication} + +\unexpanded\def\typesetapublication#1% + {\doifsomething{#1} + {\doifelse{\namedlistparameter{pubs}\c!criterium}\v!all + {\doplacepublicationindeed{#1}}% + {\ctxlua{bibtex.hacks.doifalreadyplaced("#1")} + {} + {\doplacepublicationindeed{#1}}}% + }} + +% for the moment we don't access the data directly but we will do that +% later when we get away from storing the data and only deal with +% references + +% we'll define proper handlers later + +\def\doplacepublicationindeed#1% + {\doifbibreferencefoundelse{#1} + {\global\advance\bibtexcounter\plusone + \def\currentpublicationkey{#1}% + \ctxlua{bibtex.hacks.registerplaced("#1")}% + \dodolistelement + {pubs}% + {}% + {\number\bibtexcounter}% + {\expanded{\reference[\bibrefprefix#1]{\number\bibtexcounter}}% + \strut\dotypesetapublication{#1}\strut}% + {}% + {}} + {}} % invalid + +\def\dotypesetapublication#1% + {\bgroup + \the\initializebibdefinitions % NEW + \def\@@currentalternative{:l:}% + \presetbibvariables + \let\biblanguage\empty + \ignorespaces + \bibgetvard{#1}% + \removeunwantedspaces + \ignorespaces + \bibalternative{\bibgetvart{#1}}% + \removeunwantedspaces + \egroup} + +%D An few afterthoughts: + +\let\maybeyear\gobbleoneargument +\let\noopsort \gobbleoneargument + +%D This is the result of bibtex's `language' field. + +\def\setbiblanguage#1#2{\setvalue{\??pb\s!language#1}{#2}} + +\def\lang#1% + {\edef\biblanguage{#1}% + \ifcsname\??pb\s!language#1\endcsname + \language[\getvalue{\??pb\s!language#1}]% + \fi + \ignorespaces} + +%D \subject{Citations} + +%D \macros{cite,bibref} +%D +%D The indirection with \type{\dobibref} allows \LATEX\ style +%D \type{\cite} commands with a braced argument (these might appear +%D in included data from the \type{.bib} file). 
+ +% \unexpanded\def\cite +% {\doifnextoptionalelse\dodocite\dobibref} +% \def\dobibref#1% +% {\docite[#1][]} +% \def\dodocite[#1]% +% {\startstrictinspectnextcharacter +% \dodoubleempty\dododocite[#1]} +% \def\dododocite % [#1][#2] +% {\stopstrictinspectnextcharacter +% \docite} + +\unexpanded\def\cite + {\strictdoifnextoptionalelse\dodocite\dobibref} + +\def\dobibref#1% + {\docite[#1][]} + +\def\dodocite[#1]% + {\strictdoifnextoptionalelse{\docite[#1]}{\docite[#1][]}} + +\def\docite[#1][#2]% + {\begingroup + \doifelsenothing{#2}\secondargumentfalse\secondargumenttrue + \ifsecondargument + \dowhatevercite{#1}{#2}% + \else + \donumberedcite{#1}% + \fi + \endgroup} + +\def\dowhatevercite#1#2% + {\processcommalist[#2]\docitation + \setupinteraction[\c!style=]% + \doifassignmentelse + {#1}% + {\getparameters[LO][\c!alternative=,\c!extras=,#1]% + \edef\@@currentalternative{\LOalternative}% + \ifx\@@currentalternative\empty + \edef\@@currentalternative{\@@citedefault}% + \fi + \ifx\LOextras\empty + \setupcite[\@@currentalternative][#1]% + \else + \expandafter\ifx\csname LOright\endcsname \relax + \edef\LOextras{{\LOextras\bibalternative\c!right}}% + \else + \edef\LOextras{{\LOextras\LOright}}% + \fi + \expanded{\setupcite[\@@currentalternative][#1,\c!right=\LOextras]}% + \fi}% + {\def\@@currentalternative{#1}}% + \doifelsevalue{@@pv\@@currentalternative\c!compress}\v!no\bibcitecompressfalse\bibcitecompresstrue + \getvalue{bib\@@currentalternative ref}[#2]} + +\def\donumberedcite#1% + {\processcommalist[#1]\docitation + \setupinteraction[\c!style=]% + \edef\@@currentalternative{\@@citedefault}% + \doifelsevalue{@@pv\@@currentalternative\c!compress}\v!no\bibcitecompressfalse\bibcitecompresstrue + \getvalue{bib\@@citedefault ref}[#1]} + +%D \macros{nocite} + +\def\nocite[#1]% + {\processcommalist[#1]\docitation} + +%D \macros{setupcite} + +\unexpanded\def\setupcite{\dodoubleempty\dosetupcite} + +\def\dosetupcite[#1][#2]% + {\ifsecondargument + \def\dodosetupcite##1{\getparameters[\??pv##1][#2]}% + \processcommalist[#1]\dodosetupcite + \else % default case + \getparameters[\??pv\@@citedefault][#1]% + \fi} + +%D Low-level stuff + +\def\getcitedata#1[#2]#3[#4]#5to#6% + {\bgroup + \dofetchapublication{#4}% + \doifdefinedelse{\??pb @bib#2}% + {\xdef#6{\getvalue{\??pb @bib#2}}}% + {\xdef#6{\getvalue{\??pb @#2}}}% + \egroup} + +\def\dofetchapublication#1% + {\def\currentpublicationkey{#1}% + \presetbibvariables + \ignorespaces\bibgetvard{#1}} + +\def\docitation#1% + {\iftrialtypesetting \else + \expanded{\writedatatolist[pubs][bibref=#1]}% + \fi} + +\let\addthisref\gobbleoneargument % keep this for compatibility + +%D \macros{ixbibauthoryear,thebibauthors,thebibyears} +%D +%D If compression of \type{\cite}'s argument expansion is on, +%D the macros that deal with authors and years call this internal +%D command to do the actual typesetting. +%D +%D Two entries with same author but with different years may +%D be condensed into ``Author (year1,year2)''. This is about the +%D only optimization that makes sense for the (author,year) +%D style of citations (years within one author have to be unique +%D anyway so no need to test for that, and ``Author1, Author2 (year)'' +%D creates more confusion than it does good). +%D +%D In the code below, +%D the macro \type{\thebibauthors} holds the names of the alternative +%D author info fields for the current list. This is a commalist, +%D and \type{\thebibyears} holds the (collection of) year(s) that go with +%D this author (possibly as a nested commalist). 
+%D +%D There had better be an author for all cases, but there +%D does not have to be year info always. \type{\thebibyears} is +%D pre-initialized because this makes the insertion macros simpler. +%D +%D In `normal' \TeX, of course there are expansion problems again. + +\def\ixbibauthoryear#1#2#3#4% + {\bgroup + \gdef\ixlastcommand {#4}% + \gdef\ixsecondcommand{#3}% + \gdef\ixfirstcommand {#2}% + \glet\thebibauthors \empty + \glet\thebibyears \empty + \getcommalistsize[#1]% + \ifbibcitecompress + \dorecurse\commalistsize{\xdef\thebibyears{\thebibyears,}}% + \processcommalist[#1]\docompressbibauthoryear + \else + \processcommalist[#1]\donormalbibauthoryear + \fi + \egroup + \dobibauthoryear} + +%D \macros{dodobibauthoryear} +%D +%D This macro only has to make sure that the lists +%D \type{\thebibauthors} and \type{\thebibyears} are printed. + +\def\dobibauthoryear + {\scratchcounter\zerocount + \getcommacommandsize[\thebibauthors]% + \edef\authorcount{\commalistsize}% + \@EA\processcommalist\@EA[\thebibauthors]\dodobibauthoryear} + +\def\dodobibauthoryear#1% + {\advance\scratchcounter\plusone + \edef\wantednumber{\the\scratchcounter}% + \getfromcommacommand[\thebibyears][\wantednumber]% + \@EA\def\@EA\currentbibyear\@EA{\commalistelement}% + \setcurrentbibauthor{#1}% + \ifnum\scratchcounter=\plusone + \ixfirstcommand + \else\ifnum \scratchcounter=\authorcount\relax + \ixlastcommand + \else + \ixsecondcommand + \fi\fi} + +\def\setcurrentbibauthor#1% + {\getcommacommandsize[#1]% + \ifcase\commalistsize + % anonymous? + \let\currentbibauthor\empty + \or + \def\currentbibauthor{#1}% + \or + \expanded{\docurrentbibauthor#1}% + \else + \handlemultiplebibauthors{\commalistsize}{#1}% + \fi} + +\newcount\citescratchcounter + +\def\handlemultiplebibauthors#1#2% + {\citescratchcounter\zerocount + \let\currentbibauthor\empty + \def\bibprocessauthoritem##1% + {\advance\citescratchcounter\plusone + \ifnum \citescratchcounter=#1\relax + \edef\currentbibauthor{\currentbibauthor##1}% + \else\ifnum\numexpr\citescratchcounter+\plusone\relax=#1\relax + \edef\currentbibauthor{\currentbibauthor##1\bibalternative{andtext}}% + \else + \edef\currentbibauthor{\currentbibauthor##1\bibalternative{namesep}}% + \fi\fi}% + \processcommalist[#2]\bibprocessauthoritem} + +\setupcite + [author,authoryear,authoryears] + [\c!namesep={, }] + +%D This discovery of authoretallimit is not the best one, +%D but it will do for now. + +\def\docurrentbibauthor#1,#2% + {\doifemptyelse{#2} + {\def\currentbibauthor{#1\bibalternative{otherstext}}} + {\@EA\ifx\csname\??pv\@@currentalternative authoretallimit\endcsname\relax + \edef\currentbibauthor{#1\bibalternative{andtext}#2}% + \else + \edef\currentbibauthor{#1% + \ifcase0\bibalternative{authoretallimit}\relax\or + \bibalternative{otherstext}\else\bibalternative{andtext}#2\fi}% + \fi}} + +%D This is not the one Hans made for me, because I need a global +%D edef, and the \type{\robustdoifinsetelse} doesn't listen to +%D \type{\doglobal } + +\def\robustaddtocommalist#1#2% {item} \cs + {\robustdoifinsetelse{#1}#2\resetglobal + {\dodoglobal\xdef#2{\ifx#2\empty\else#2,\fi#1}}} + +%D \macros{donormalbibauthoryear} +%D +%D Now we get to the macros that fill the two lists. +%D The `simple' one really is quite simple. 
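Both filling macros below rely on the author formatting defined above, which is configured per citation alternative; a small configuration sketch with illustrative values (not new defaults):

    \setupcite
      [author,authoryear,authoryears]
      [namesep={, },           % between authors within one entry
       andtext={ and },        % before the last author
       otherstext={ et al.},   % appended when the author list is truncated
       authoretallimit=1]      % keep only the first author, then otherstext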
+ +\def\donormalbibauthoryear#1% + {\def\myauthor{Xxxxxxxxxx}% + \def\myyear{0000}% + \doifbibreferencefoundelse{#1} + {\def\myauthor{{\bibgetvara{#1}}}% + \def\myyear {\bibgetvary{#1}}}% + {}% + \@EA\doglobal\@EA\appendtocommalist\@EA{\myauthor}\thebibauthors + \@EA\doglobal\@EA\appendtocommalist\@EA{\myyear }\thebibyears} + +%D \macros{docompressbibauthoryear} +%D +%D So much for the easy parts. Nothing at all will be done if +%D the reference is not found or the reference does not contain +%D author data. No questions marks o.s.s. (to be fixed later) + +\def\docompressbibauthoryear#1% + {\def\myauthor{Xxxxxxxxxx}% + \def\myyear {0000}% + \doifbibreferencefoundelse{#1} + {\xdef\myauthor{\bibgetvara{#1}}% + \xdef\myyear {\bibgetvary{#1}}} + {}% + \ifx\myauthor\empty\else + \checkifmyauthoralreadyexists + \findmatchingyear + \fi} + +%D two temporary counters. One of these two can possibly be replaced +%D by \type{\scratchcounter}. + +\newcount\bibitemcounter +\newcount\bibitemwanted + +%D The first portion is simple enough: if this is the very first author +%D it is quite straightforward to add it. \type{\bibitemcounter} and +%D \type{\bibitemwanted} are needed later to insert the year +%D information in the correct item of \type{\thebibyears} + +\def\checkifmyauthoralreadyexists + {\doifemptyelsevalue{thebibauthors} + {\global\bibitemwanted \plusone + \global\bibitemcounter\plusone + \xdef\thebibauthors{{\myauthor}}} + {% the next weirdness is because according to \getcommalistsize, + % the length of \type{[{{},{}}]} is 2. + \@EA\getcommalistsize\@EA[\thebibauthors,]% + \global\bibitemcounter\numexpr\commalistsize+\minusone\relax + \global\bibitemwanted \zerocount + \processcommacommand[\thebibauthors]\docomparemyauthor}} + +%D The outer \type{\ifnum} accomplishes the addition of +%D a new author to \type{\thebibauthors}. The messing about with +%D the two counters is again to make sure that \type{\thebibyears} +%D will be updated correctly.If the author {\it was} found, +%D the counters will stay at their present values and everything +%D will be setup properly to insert the year info. + +\def\docomparemyauthor#1% + {\global\advance\bibitemwanted \plusone + \def\mytempc{#1}% + \ifx\mytempc\myauthor + \quitcommalist + \else\ifnum\bibitemwanted=\bibitemcounter\relax + \global\advance\bibitemwanted \plusone + \global\bibitemcounter\bibitemwanted\relax + \@EA\doglobal\@EA\robustaddtocommalist\@EA{{\myauthor}}\thebibauthors + \fi\fi} + +%D This macro should be clear now. + +\def\findmatchingyear + {\edef\wantednumber{\the\bibitemwanted}% + \getfromcommacommand[\thebibyears][\wantednumber]% + \ifx\commalistelement\empty + \edef\myyear{{\myyear}}% + \else + \edef\myyear{{\commalistelement,\myyear}}% + \fi + \edef\newcommalistelement{\myyear}% + \doglobal\replaceincommalist \thebibyears \wantednumber} + +%D \macros{gotobiblink,inbiblink,atbiblink} +%D +%D The final task is looping over that list until a match is found. + +\def\gotobiblink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{\unknownreference{#2}}} +\def\atbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{\unknownreference{#1}}} +\def\inbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{\unknownreference{#1}}} + +%D \macros{bibauthoryearref,bibauthoryearsref,bibauthorref,bibyearref} +%D +%D Now that all the hard work has been done, these are simple. 
+%D \type{\ixbibauthoryearref} stores the data in the macros +%D \type{\currentbibauthor} and \type{\currentbibyear}. + +\def\ifbibinteractionelse + {\iflocation + \edef\test{\bibalternative\c!interaction}% + \ifx\test\v!stop + \@EA\@EA\@EA\secondoftwoarguments + \else + \@EA\@EA\@EA\firstoftwoarguments + \fi + \else + \@EA\secondoftwoarguments + \fi} + +\def\ifbibinteractionelse + {\iflocation + \doifelse{\bibalternative\c!interaction}\v!stop + {\@EA\secondoftwoarguments} + {\@EA\firstoftwoarguments}% + \else + \@EA\secondoftwoarguments + \fi} + +\def\bibmaybeinteractive#1#2% + {\ifbibcitecompress + #2% + \else + \ifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}% + \fi} + +\def\bibauthoryearref[#1]% + {\ixbibauthoryear{#1}% + {\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween + \bibalternative\v!left{\currentbibyear}\bibalternative\v!right}} + {\bibalternative\c!pubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween + \bibalternative\v!left {\currentbibyear}\bibalternative\v!right}} + {\bibalternative\c!lastpubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween + \bibalternative\v!left {\currentbibyear}\bibalternative\v!right}}} + +\def\bibauthoryearsref[#1]% + {\bibalternative\v!left + \ixbibauthoryear{#1} + {\bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}} + {\bibalternative\c!pubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}} + {\bibalternative\c!lastpubsep + \bibmaybeinteractive{#1}{{\currentbibauthor}\bibalternative\c!inbetween{\currentbibyear}}}% + \bibalternative\v!right} + +\def\bibauthorref[#1]% + {\bibalternative\v!left + \ixbibauthoryear{#1}% + {\bibmaybeinteractive{#1}{{\currentbibauthor}}} + {\bibalternative\c!pubsep \bibmaybeinteractive{#1}{{\currentbibauthor}}} + {\bibalternative\c!lastpubsep\bibmaybeinteractive{#1}{{\currentbibauthor}}}% + \bibalternative\v!right} + +\def\bibyearref[#1]% + {\bibalternative\v!left + \ixbibauthoryear{#1}% + {\bibmaybeinteractive{#1}{{\currentbibyear}}} + {\bibalternative\c!pubsep \bibmaybeinteractive{#1}{{\currentbibyear}}} + {\bibalternative\c!lastpubsep\bibmaybeinteractive{#1}{{\currentbibyear}}}% + \bibalternative\v!right} + +%D \macros{bibshortref,bibkeyref,bibpageref,bibtyperef,bibserialref} +%D +%D There is hardly any point in trying to compress these. The only +%D thing that needs to be done is making sure that +%D the separations are inserted correctly. And that is +%D what \type{\bibinsertrefsep} does. 
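Linking and compression interact: \bibmaybeinteractive only wraps its text in a \gotobiblink when compression is off, and \ifbibinteractionelse additionally honours the interaction parameter. A sketch:

    \setupinteraction[state=start]             % interactive document

    \setupcite[authoryear][compress=no]        % compressed citations are never linked
    % \setupcite[authoryear][interaction=stop] % would suppress the links again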
+ +\newconditional\firstbibrefsep + +\def\bibresetrefsep + {\settrue\firstbibrefsep} + +\def\bibinsertrefsep + {\ifconditional\firstbibrefsep + \setfalse\firstbibrefsep + \else + \bibalternative\c!pubsep + \fi} + +\def\bibshortref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibshortref + \bibalternative\v!right} + +\def\dobibshortref#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\gotobiblink{\bibgetvars{#1}}[#1]} + {\unknownreference{#1}}} + +\def\bibserialref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibserialref + \bibalternative\v!right} + +\def\dobibserialref#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\gotobiblink{\bibgetvarn{#1}}[#1]} + {\unknownreference{#1}}} + +\def\bibkeyref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibkeyref + \bibalternative\v!right} + +\def\dobibkeyref#1% + {\bibinsertrefsep + \gotobiblink{#1}[#1]} + +\def\bibgotoDOI#1#2% + {\ifbibinteractionelse + {\useURL[bibfooDoi#1][#2]% + \useURL[bibfoo#1][http://dx.doi.org/#2]% + \goto{\url[bibfooDoi#1]}[url(bibfoo#1)]} + {\hyphenatedurl{#2}}} + +\def\bibdoiref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibdoiref + \bibalternative\v!right} + +\def\dobibdoiref#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\expanded{\bibgotoDOI{#1}{\bibgetvaro{#1}}}} + {\unknownreference{#1}}} + +\def\biburlref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobiburlref + \bibalternative\v!right} + +\def\bibgotoURL#1#2% + {\ifbibinteractionelse + {\useURL[bibfoo#1][#2]\goto{\url[bibfoo#1]}[url(bibfoo#1)]} + {\hyphenatedurl{#2}}} + +\def\dobiburlref#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\expanded{\bibgotoURL{#1}{\bibgetvaru{#1}}}} + {\unknownreference{#1}}} + +\def\bibtyperef[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibtyperef + \bibalternative\v!right} + +\def\dobibtyperef#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\gotobiblink{\bibgetvart{#1}}[#1]} + {\unknownreference{#1}}} + +\def\bibpageref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibpageref + \bibalternative\v!right} + +\def\dobibpageref#1% + {\bibinsertrefsep + \ifbibinteractionelse + {\atbiblink[#1]} + {{\referencingfalse\at[#1]}}} + +\def\bibdataref[#1]% + {\bibalternative\v!left + \bibresetrefsep\processcommalist[#1]\dobibdata + \bibalternative\v!right} + +\def\dobibdata#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\dotypesetapublication{#1}} + {\unknownreference{#1}}} + +\let\bibnoneref\nocite + +%D \macros{bibnumref} + +\def\bibnumref[#1]% + {\begingroup + \bibalternative\v!left + \penalty\!!tenthousand + \ctxlua{bibtex.hacks.resolve("","\number\bibtexblock","#1")}% + \bibalternative\v!right + \endgroup} + +\def\dowithbibtexnumrefconnector#1#2% + {\ifnum#1>\plusone + \ifnum#2>\plusone + \ifnum#2=#1\relax + \bibalternative{lastpubsep}% + \else + \bibalternative{pubsep}% + \fi + \fi + \fi} + +\def\dowithbibtexnumref#1#2#3#4#5% n, i, prefix block ref + {\dowithbibtexnumrefconnector{#1}{#2}% + \def\bibrefprefix{#4:}% + \inbiblink[#5]} + +\def\dowithbibtexnumrefrange#1#2#3#4#5#6#7% n, i, prefix block ref + {\dowithbibtexnumrefconnector{#1}{#2}% + \def\bibrefprefix{#4:}% + \inbiblink[#5]% + \endash + \def\bibrefprefix{#6:}% + \inbiblink[#7]} + +%D By request from Sanjoy. This makes it easier to implement +%D \type{\citeasnoun}. 
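Each of these variants is reached through the two-argument form of \cite; for example, again with placeholder keys:

    \cite[short][hh2010]           % \bibshortref
    \cite[doi][hh2010]             % \bibdoiref, a dx.doi.org hyperlink when interactive
    \cite[url][hh2010]             % \biburlref
    \cite[page][hh2010]            % \bibpageref
    \cite[num][hh2010,knuth1984]   % \bibnumref, resolved by bibtex.hacks.resolve

    \setuppublications[refcommand=num] % alternative used by a plain \cite[key]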
+ +\def\bibauthornumref[#1]% + {\getcommalistsize[#1]% + \global\bibitemcounter\commalistsize + \bibresetrefsep + \processcommalist[#1]\dobibauthornumref } + +\def\dobibauthornumref#1% + {\bibinsertrefsep + \doifbibreferencefoundelse{#1} + {\begingroup + \bibgetvara{#1}% + \bibalternative\c!inbetween + \setuppublications[\c!refcommand=num]% + \cite[#1]% + \endgroup} + {\unknownreference{#1}}} + +%D And some defaults are loaded from bibl-apa: + +\def\c!monthconversion{monthconversion} % todo + +\setuppublications + [\c!monthconversion=, + \c!alternative=apa, + \c!method=\v!global, + \c!refcommand=num, + \c!numbercommand=\bibleftnumber] + +\def\preloadbiblist + {\globallet\preloadbiblist\relax + \dousepublications\jobname} + +% \appendtoks \preloadbiblist \to \everysetuppublications +% \appendtoks \preloadbiblist \to \everystarttext + +\protect \endinput diff --git a/Master/texmf-dist/tex/context/base/bibl-tst.lua b/Master/texmf-dist/tex/context/base/bibl-tst.lua index a1b85b0f78b..5ff8f45709f 100644 --- a/Master/texmf-dist/tex/context/base/bibl-tst.lua +++ b/Master/texmf-dist/tex/context/base/bibl-tst.lua @@ -11,8 +11,8 @@ bibtex.load(session,"texgraph.bib") bibtex.load(session,"texjourn.bib") bibtex.load(session,"texnique.bib") bibtex.load(session,"tugboat.bib") +print(bibtex.size,statistics.elapsedtime(bibtex)) bibtex.toxml(session) - print(bibtex.size,statistics.elapsedtime(bibtex)) --~ print(table.serialize(session.data)) diff --git a/Master/texmf-dist/tex/context/base/blob-ini.lua b/Master/texmf-dist/tex/context/base/blob-ini.lua new file mode 100644 index 00000000000..0f7ccee26c5 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/blob-ini.lua @@ -0,0 +1,165 @@ +if not modules then modules = { } end modules ['blob-ini'] = { + version = 1.001, + comment = "companion to blob-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- later we will consider an OO variant. + +-- This module is just a playground. Occasionally we need to typeset +-- at the lua and and this is one method. In principle we can construct +-- pages this way too which sometimes makes sense in dumb cases. Actually, +-- if one only needs this, one does not really need tex, okay maybe the +-- parbuilder but that one can be simplified as well then. + +-- set fonts, attributes +-- rest already done in packers etc +-- add local par whatsit (or wait till cleaned up) +-- collapse or new pars +-- interline spacing etc + +-- DON'T USE THESE FUNCTIONS AS THEY WILL CHANGE! 
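The comment above is explicit that this interface will change, so what follows is only an illustrative sketch of the lowest-level entry point; the higher-level route (blobs.new, blobs.append, blobs.pack, blobs.write) is shown in the commented example in blob-ini.mkiv further down. blobs.tonodes builds a glyph-and-glue list for the current font which can be injected into the running text:

    \starttext
      \dontleavehmode
      \startluacode
        local head, tail = blobs.tonodes("Hello from the Lua end")
        if head then
            node.write(head) -- append the freshly built node list to the current list
        end
      \stopluacode
      \par
    \stoptext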
+ +local type = type + +local utfvalues = string.utfvalues +local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns + +local fontdata = fonts.identifiers + +local new_glyph_node = nodes.glyph +local new_glue_node = nodes.glyph + +local copy_node = node.copy +local copy_node_list = node.copy_list +local insert_node_after = node.insert_after +local flush_node_list = node.flush_list +local hpack_node_list = node.hpack +local vpack_node_list = node.vpack +local write_node = node.write + +local current_font = font.current + +blobs = blobs or { } + +local newline = lpegpatterns.newline +local space = lpegpatterns.spacer +local spacing = newline * space^0 +local content = (space^1)/" " + (1-spacing) + +local ctxtextcapture = lpeg.Ct ( ( + space^0 * ( + newline^2 * space^0 * lpeg.Cc("") + + newline * space^0 * lpeg.Cc(" ") + + lpeg.Cs(content^1) + ) +)^0) + +local function tonodes(str,fnt,attr) -- (str,template_glyph) + if not str or str == "" then + return + end + local head, tail, space, fnt, template = nil, nil, nil, nil, nil + if not fnt then + fnt = current_font() + elseif type(fnt) ~= "number" and fnt.id == "glyph" then + fnt, template = nil, fnt + -- else + -- already a number + end + for s in utfvalues(str) do + local n + if s == 32 then + if not space then + local parameters = fontdata[fnt].parameters + space = new_glue_node(parameters.space,parameters.space_stretch,parameters.space_shrink) + n = space + else + n = copy_node(space) + end + elseif template then + n = copy_node(template) + n.char = s + else + n = new_glyph_node(fnt,s) + end + if attr then -- normall false when template + n.attr = copy_node_list(attr) + end + if head then + insert_node_after(head,tail,n) + else + head = n + end + tail = n + end + return head, tail +end + +blobs.tonodes = tonodes + +function blobs.new() + return { + list = { }, + } +end + +function blobs.append(t,str) + local kind = type(str) + local dummy = nil + if kind == "string" then + local pars = lpegmatch(ctxtextcapture,str) + local list = t.list + for p=1,#pars do + local str = pars[p] + if #str == 0 then + list[#list+1 ] = { head = nil, tail = nil } + else + local l = list[#list] + if not l then + l = { head = nil, tail = nil } + list[#list+1 ] = l + end + local head, tail = tonodes(str,nil,nil) + if head then + if l.head then + l.tail.next = head + head.prev = l.tail + l.tail = tail + else + l.head, l.tail = head, tail + end + end + end + end + end +end + +function blobs.pack(t,how) + local list = t.list + for i=1,#list do + local pack = list[i].pack + if pack then + flush_node_list(node.pack) + end + if how == "vertical" then + -- we need to prepend a local par node + -- list[i].pack = node.vpack(list[i].head,"exactly") + logs.report("blobs","vpack not yet supported") + else + list[i].pack = hpack_node_list(list[i].head,"exactly") + end + end +end + +function blobs.write(t) + local list = t.list + for i=1,#list do + local pack = list[i].pack + if pack then + write_node(pack) + end + end +end diff --git a/Master/texmf-dist/tex/context/base/blob-ini.mkiv b/Master/texmf-dist/tex/context/base/blob-ini.mkiv new file mode 100644 index 00000000000..7f63ec73d91 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/blob-ini.mkiv @@ -0,0 +1,34 @@ +%D \module +%D [ file=blob-ini, +%D version=2010.04.06, +%D title=\CONTEXT\ \LUA\ Typesetting, +%D subtitle=Initialization, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=\PRAGMA] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. 
See mreadme.pdf for +%C details. + +\writestatus{loading}{ConTeXt Lua Typesetting / Initialization} + +%D This is a prelude to typesetting at the \LUA\ end. The code +%D is already quite old but will only get nice when we are further +%D down the road (close to version 1.00 of \LUATEX). Typesetting in +%D pure \LUA\ sometimes makes sense. + +\registerctxluafile{blob-ini}{1.001} + +\endinput + +% \starttext +% +% \startluacode +% local b = blobs.new() +% blobs.append(b,"Hello world.\n Here we are.\n\n And Again!") +% blobs.pack(b) +% blobs.write(b) +% \stopluacode +% +% \stoptext diff --git a/Master/texmf-dist/tex/context/base/buff-ini.lua b/Master/texmf-dist/tex/context/base/buff-ini.lua index f02963d6a06..6b1af8f96f3 100644 --- a/Master/texmf-dist/tex/context/base/buff-ini.lua +++ b/Master/texmf-dist/tex/context/base/buff-ini.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['buff-ini'] = { version = 1.001, - comment = "companion to core-buf.tex", + comment = "companion to core-buf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -20,16 +20,24 @@ buffers.visualizers = { } -- if needed we can make 'm local +local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end) +local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) + local utf = unicode.utf8 local concat, texsprint, texprint, texwrite = table.concat, tex.sprint, tex.print, tex.write local utfbyte, utffind, utfgsub = utf.byte, utf.find, utf.gsub local type, next = type, next -local byte, sub, find, char, gsub, rep, lower = string.byte, string.sub, string.find, string.char, string.gsub, string.rep, string.lower +local huge = math.huge +local byte, sub, find, char, gsub, rep, lower, format, gmatch, match = string.byte, string.sub, string.find, string.char, string.gsub, string.rep, string.lower, string.format, string.gmatch, string.match local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues local ctxcatcodes = tex.ctxcatcodes +local variables = interfaces.variables +local lpegmatch = lpeg.match + +local data, flags, hooks, visualizers = buffers.data, buffers.flags, buffers.hooks, buffers.visualizers -local data, commands, flags, hooks, visualizers = buffers.data, buffers.commands, buffers.flags, buffers.hooks, buffers.visualizers +visualizers.defaultname = variables.typing function buffers.raw(name) return data[name] or { } @@ -47,6 +55,7 @@ function buffers.append(name, str) data[name] = (data[name] or "") .. str end + buffers.flags.store_as_table = true -- to be sorted out: crlf + \ ; slow now @@ -65,11 +74,11 @@ function buffers.grab(name,begintag,endtag,bufferdata) buffers.level = buffers.level - 1 else if dn == "" then - dn = bufferdata:sub(1,#bufferdata-1) + dn = sub(bufferdata,1,#bufferdata-1) else - dn = dn .. "\n" .. bufferdata:sub(1,#bufferdata-1) + dn = dn .. "\n" .. 
sub(bufferdata,1,#bufferdata-1) end - dn = dn:gsub("[\010\013]$","") + dn = gsub(dn,"[\010\013]$","") if flags.store_as_table then dn = dn:splitlines() end @@ -104,23 +113,25 @@ function buffers.verbatimbreak(n,m) if flags.optimize_verbatim then if n == 2 or n == m then texsprint(no_break_command) - else + elseif n > 1 then texsprint(do_break_command) end end end -function buffers.strip(lines) - local first, last = 1, #lines +function buffers.strip(lines,first,last) + local first, last = first or 1, last or #lines for i=first,last do - if #lines[i] == 0 then + local li = lines[i] + if #li == 0 or find(li,"^%s*$") then first = first + 1 else break end end for i=last,first,-1 do - if #lines[i] == 0 then + local li = lines[i] + if #li == 0 or find(li,"^%s*$") then last = last - 1 else break @@ -129,15 +140,62 @@ function buffers.strip(lines) return first, last, last - first + 1 end -function buffers.type(name) +function buffers.range(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere + local first, last = first or 1, last or #lines + local what = aux.settings_to_array(range) + local r_first, r_last = what[1], what[2] + local f, l = tonumber(r_first), tonumber(r_last) + if r_first then + if f then + if f > first then + first = f + end + else + for i=first,last do + if find(lines[i],r_first) then + first, strip = i + 1 + break + end + end + end + end + if r_last then + if l then + if find(r_last,"^[%+]") then -- 1,+3 + l = first + l + end + if l < last then + last = l + end + else + for i=first,last do + if find(lines[i],r_last) then + last = i - 1 + break + end + end + end + end + return first, last +end + +function buffers.type(name,realign,range) local lines = data[name] local action = buffers.typeline if lines then if type(lines) == "string" then lines = lines:splitlines() + data[name] = lines + end + if realign then + lines = buffers.realign(lines,realign) end local line, n = 0, 0 local first, last, m = buffers.strip(lines) + if range then + first, last = buffers.range(lines,first,last,range) + first, last = buffers.strip(lines,first,last) + end hooks.begin_of_display() for i=first,last do n, line = action(lines[i], n, m, line) @@ -153,16 +211,34 @@ function buffers.loaddata(filename) -- this one might go away if not str then ok, str, n = resolvers.loaders.tex(file.addsuffix(filename,'tex')) end - return str or "" end -function buffers.typefile(name) -- still somewhat messy, since name can be be suffixless +function buffers.loaddata(filename) -- this one might go away + local foundname = resolvers.findtexfile(filename) or "" + if foundname == "" then + foundname = resolvers.findtexfile(file.addsuffix(filename,'tex')) or "" + end + if foundname == "" then + return "" + else + return resolvers.loadtexfile(foundname) + end +end + +function buffers.typefile(name,realign,range) -- still somewhat messy, since name can be be suffixless local str = buffers.loaddata(name) if str and str~= "" then local lines = str:splitlines() + if realign then + lines = buffers.realign(lines,realign) + end local line, n, action = 0, 0, buffers.typeline local first, last, m = buffers.strip(lines) hooks.begin_of_display() + if range then + first, last = buffers.range(lines,first,last,range) + first, last = buffers.strip(lines,first,last) + end for i=first,last do n, line = action(lines[i], n, m, line) end @@ -173,7 +249,7 @@ end function buffers.typeline(str,n,m,line) n = n + 1 buffers.verbatimbreak(n,m) - if str:find("%S") then + if find(str,"%S") then line = line + 1 hooks.begin_of_line(line) 
hooks.flush_line(hooks.line(str)) @@ -187,28 +263,51 @@ function buffers.typeline(str,n,m,line) return n, line end -function buffers.save(name,list,encapsulate) - if not name or name == "" then - name = tex.jobname - end - if list then - -- ok - else +-- The optional prefix hack is there for the typesetbuffer feature and +-- in mkii we needed that (this hidden feature is used in a manual). + +local function prepared(name,list) -- list is optional + if not list or list == "" then list = name - name = tex.jobname .. "-" .. name .. ".tmp" end - local content = buffers.collect(list) + if not name or name == "" then + name = tex.jobname .. "-" .. list .. ".tmp" + end + local content = buffers.collect(list,nil) or "" if content == "" then content = "empty buffer" end - if encapsulate then - io.savedata(name, "\\starttext\n"..content.."\n\\stoptext\n") - else - io.savedata(name, content) + return name, content +end + +local capsule = "\\starttext\n%s\n\\stoptext\n" +local command = "context %s" + +function buffers.save(name,list,encapsulate) -- list is optional + local name, content = prepared(name,list) + io.savedata(name, (encapsulate and format(capsule,content)) or content) +end + +function commands.savebuffer(list,name) -- name is optional + buffers.save(name,list) +end + +function buffers.run(name,list,encapsulate) + local name, content = prepared(name,list) + local data = io.loaddata(name) + content = (encapsulate and format(capsule,content)) or content + if data ~= content then + if trace_run then + commands.writestatus("buffers","changes in '%s', processing forced",name) + end + io.savedata(name,content) + os.execute(format(command,name)) + elseif trace_run then + commands.writestatus("buffers","no changes in '%s', not processed",name) end end -local printer = (lpeg.linebyline/texprint)^0 +local printer = (lpeg.patterns.textline/texprint)^0 function buffers.get(name) local b = buffers.data[name] @@ -218,7 +317,7 @@ function buffers.get(name) texprint(b[i]) end else - printer:match(b) + lpegmatch(printer,b) end end end @@ -240,21 +339,15 @@ buffers.content = content function buffers.collect(names,separator) -- no print -- maybe we should always store a buffer as table so - -- that we can pass if directly + -- that we can pass it directly + if type(names) == "string" then + names = aux.settings_to_array(names) + end local t = { } - if type(names) == "table" then - for i=1,#names do - local c = content(names[i],separator) - if c ~= "" then - t[#t+1] = c - end - end - else - for name in names:gmatch("[^,%s]+") do - local c = content(name,separator) - if c ~= "" then - t[#t+1] = c - end + for i=1,#names do + local c = content(names[i],separator) + if c ~= "" then + t[#t+1] = c end end return concat(t,separator or "\r") -- "\n" is safer due to comments and such @@ -274,11 +367,12 @@ function buffers.inspect(name) local b = data[name] if b then if type(b) == "table" then - for _,v in ipairs(b) do + for k=1,#b do + local v = b[k] if v == "" then texsprint(ctxcatcodes,"[crlf]\\par ") -- space ? 
else - texsprint(ctxcatcodes,(gsub(b,"(.)",tobyte)),"\\par") + texsprint(ctxcatcodes,(gsub(v,"(.)",tobyte)),"\\par") end end else @@ -289,42 +383,90 @@ end -- maybe just line(n,str) empty(n,str) -visualizers.handlers = visualizers.handlers or { } -visualizers.escapetoken = nil -visualizers.tablength = 7 -visualizers.enabletab = true -- false -visualizers.enableescape = false -visualizers.obeyspace = true +visualizers.tablength = 7 +visualizers.enabletab = true -- false +visualizers.obeyspace = true + +function buffers.settablength(tablength) + visualizers.tablength = tablength and tonumber(tablength) or 7 +end + +visualizers.handlers = visualizers.handlers or { } local handlers = visualizers.handlers function buffers.newvisualizer(name) + name = lower(name) local handler = { } handlers[name] = handler return handler end function buffers.getvisualizer(name) - return handlers[name] + name = lower(name) + return handlers[name] or buffers.loadvisualizer(name) end -local default = buffers.newvisualizer("default") +function buffers.loadvisualizer(name) + name = lower(name) + local hn = handlers[name] + if hn then + return hn + else + environment.loadluafile("pret-" .. name) + local hn = handlers[name] + if not hn then + -- hn = buffers.newvisualizer(name) + hn = handlers[visualizers.defaultname] + handlers[name] = hn + if trace_visualize then + logs.report("buffers","mapping '%s' visualizer onto '%s'",name,visualizers.defaultname) + end + elseif trace_visualize then + logs.report("buffers","loading '%s' visualizer",name) + end + return hn + end +end + +-- was "default", should be set at tex end (todo) + +local default = buffers.newvisualizer(visualizers.defaultname) + +--~ print(variables.typing) os.exit() + +-- will become cleaner local currentvisualizer, currenthandler function buffers.setvisualizer(str) currentvisualizer = lower(str) currenthandler = handlers[currentvisualizer] - if not currenthandler then - currentvisualizer = 'default' + if currenthandler then + -- if trace_visualize then + -- logs.report("buffers","enabling specific '%s' visualizer",currentvisualizer) + -- end + else + currentvisualizer = visualizers.defaultname currenthandler = handlers.default + -- if trace_visualize then + -- logs.report("buffers","enabling default visualizer '%s'",currentvisualizer) + -- end end if currenthandler.reset then currenthandler.reset() end end -buffers.setvisualizer("default") +function buffers.resetvisualizer() + currentvisualizer = visualizers.defaultname + currenthandler = handlers.default + if currenthandler.reset then + currenthandler.reset() + end +end + +buffers.setvisualizer(visualizers.defaultname) function visualizers.reset() end @@ -351,10 +493,15 @@ function hooks.end_of_inline() (currenthandler.end_of_inline or default.end_of_inline)() end - function hooks.flush_line(str,nesting) - str = gsub(str," *[\n\r]+ *"," ") ; -- semi colon needed - (currenthandler.flush_line or default.flush_line)(str,nesting) + local fl = currenthandler.flush_line + if fl then + str = gsub(str," *[\n\r]+ *"," ") ; -- semi colon needed + fl(str,nesting) + else + -- gsub done later + default.flush_line(str,nesting) + end end function hooks.flush_inline(str,nesting) @@ -419,11 +566,11 @@ function default.line(str) end function default.flush_line(str) - str = str:gsub(" *[\n\r]+ *"," ") + str = gsub(str," *[\n\r]+ *"," ") if visualizers.obeyspace then for c in utfcharacters(str) do if c == " " then - texsprint(ctxcatcodes,"\\obs ") + texsprint(ctxcatcodes,"\\obs") else texwrite(c) end @@ -454,13 +601,14 @@ 
end -- special one -commands.nested = "\\switchslantedtype " +buffers.commands.nested = "\\switchslantedtype " -- todo : utf + faster, direct print and such. no \\char, vrb catcodes, see end function visualizers.flush_nested(str, enable) -- no utf, kind of obsolete mess - str = str:gsub(" *[\n\r]+ *"," ") + str = gsub(str," *[\n\r]+ *"," ") local result, c, nested, i = "", "", 0, 1 + local commands = buffers.commands -- otherwise wrong commands while i < #str do -- slow c = sub(str,i,i+1) if c == "<<" then @@ -481,7 +629,7 @@ function visualizers.flush_nested(str, enable) -- no utf, kind of obsolete mess c = sub(str,i,i) if c == " " then result = result .. "\\obs " - elseif c:find("%a") then + elseif find(c,"%a") then result = result .. c else result = result .. "\\char" .. byte(c) .. " " @@ -546,6 +694,106 @@ function buffers.flush_result(result,nested) end end +-- new + +function buffers.realign(name,forced_n) -- no, auto, + local n, d + if type(name) == "string" then + d = data[name] + if type(d) == "string" then + d = d:splitlines() + end + else + d = name -- already a buffer + end + forced_n = (forced_n == variables.auto and huge) or tonumber(forced_n) + if forced_n then + for i=1, #d do + local spaces = find(d[i],"%S") + if not spaces then + -- empty line + elseif not n then + n = spaces + elseif spaces == 0 then + n = 0 + break + elseif n > spaces then + n = spaces + end + end + if n > 0 then + if n > forced_n then + n = forced_n + end + for i=1,#d do + d[i] = sub(d[i],n) + end + end + end + return d +end + +-- escapes: buffers.set_escape("tex","/BTEX","/ETEX") + +local function flush_escaped_line(str,pattern,flushline) + while true do + local a, b, c = match(str,pattern) + if a and a ~= "" then + flushline(a) + end + if b and b ~= "" then + texsprint(ctxcatcodes,"{",b,"}") + end + if c then + if c == "" then + break + else + str = c + end + else + flushline(str) + break + end + end +end + +function buffers.set_escape(name,pair) + if pair and pair ~= "" then + local visualizer = buffers.getvisualizer(name) + visualizer.normal_flush_line = visualizer.normal_flush_line or visualizer.flush_line + if pair == variables.no then + visualizer.flush_line = visualizer.normal_flush_line or visualizer.flush_line + if trace_visualize then + logs.report("buffers","resetting escape range for visualizer '%s'",name) + end + else + local start, stop + if pair == variables.yes then + start, stop = "/BTEX", "/ETEX" + else + pair = string.split(pair,",") + start, stop = string.esc(pair[1] or ""), string.esc(pair[2] or "") + end + if start ~= "" then + local pattern + if stop == "" then + pattern = "^(.-)" .. start .. "(.*)(.*)$" + else + pattern = "^(.-)" .. start .. "(.-)" .. stop .. 
"(.*)$" + end + function visualizer.flush_line(str) + flush_escaped_line(str,pattern,visualizer.normal_flush_line) + end + if trace_visualize then + logs.report("buffers","setting escape range for visualizer '%s' to %s -> %s",name,start,stop) + end + elseif trace_visualize then + logs.report("buffers","problematic escape specification '%s' for visualizer '%s'",pair,name) + end + end + end +end + -- THIS WILL BECOME A FRAMEWORK: the problem with prety printing is that -- we deal with snippets and therefore we need tolerant parsing @@ -587,6 +835,4 @@ end --~ str = [[test 123 test $oeps$]] ---~ pattern:match(str) - - +--~ lpegmatch(pattern,str) diff --git a/Master/texmf-dist/tex/context/base/buff-ini.mkii b/Master/texmf-dist/tex/context/base/buff-ini.mkii index 0f0655ea126..fc147d09041 100644 --- a/Master/texmf-dist/tex/context/base/buff-ini.mkii +++ b/Master/texmf-dist/tex/context/base/buff-ini.mkii @@ -188,15 +188,15 @@ \letvalue{\??bu#1\c!number }\nofdefinedbuffers \letvalue{\??bu#1\c!paragraph}\v!no \setevalue{\e!start#1}{\noexpand\dostartbuffer[#1][def-\nofdefinedbuffers][\e!start#1][\e!stop#1]}% - \setevalue{\e!get #1}{\noexpand\dogetbuffer [#1][def-\nofdefinedbuffers]}% - \setevalue{\e!type #1}{\noexpand\dotypebuffer [#1][def-\nofdefinedbuffers]}% + \unexpanded\setevalue{\e!get #1}{\noexpand\dogetbuffer [#1][def-\nofdefinedbuffers]}% + \unexpanded\setevalue{\e!type #1}{\noexpand\dotypebuffer [#1][def-\nofdefinedbuffers]}% \getparameters[\??bu#1][#2]% \fi} \def\definebuffer {\dodoubleempty\dodefinebuffer} -\def\getbuffer +\unexpanded\def\getbuffer {\dodoubleempty\dogetbuffer} \def\dogetbuffer[#1][#2]% @@ -213,7 +213,7 @@ \dobuffer{16}{#2}\dogetbufferasis \getvalue{\??bu#1\c!after}} -\def\typebuffer +\unexpanded\def\typebuffer {\dodoubleempty\dotypebuffer} \def\dogetfilebuffer{\typefile{\TEXbufferfile{\currentbuffer}}} diff --git a/Master/texmf-dist/tex/context/base/buff-ini.mkiv b/Master/texmf-dist/tex/context/base/buff-ini.mkiv index 29ce4687de9..86b0fa3c530 100644 --- a/Master/texmf-dist/tex/context/base/buff-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/buff-ini.mkiv @@ -51,11 +51,7 @@ % \stoptext \ifdefined\doinitializeverbatim \else% temp hack - \ifdefined\mkinitializeverbatim - \let\doinitializeverbatim\mkinitializeverbatim - \else - \def\doinitializeverbatim{\tttf} - \fi + \def\doinitializeverbatim{\tttf} \fi \unprotect @@ -150,7 +146,7 @@ \ctxlua{buffers.set("\currentbuffer", \!!bs\detokenize{#2}\!!es)}% \endgroup} -\def\setupbuffer +\unexpanded\def\setupbuffer {\dodoubleempty\dosetupbuffer} \def\dosetupbuffer[#1][#2]% @@ -165,16 +161,19 @@ \doglobal\increment\nofdefinedbuffers \letvalue{\??bu#1\c!number }\nofdefinedbuffers \letvalue{\??bu#1\c!paragraph}\v!no - \setevalue{\e!start#1}{\noexpand\dostartbuffer[#1][def-\nofdefinedbuffers][\e!start#1][\e!stop#1]}% - \setevalue{\e!get #1}{\noexpand\dogetbuffer [#1][def-\nofdefinedbuffers]}% - \setevalue{\e!type #1}{\noexpand\dotypebuffer [#1][def-\nofdefinedbuffers]}% + \setuevalue{\e!start#1}{\noexpand\dostartbuffer[#1][def-\nofdefinedbuffers][\e!start#1][\e!stop#1]}% maybe also relax stop + \setuevalue{\e!get #1}{\noexpand\dogetbuffer [#1][def-\nofdefinedbuffers]}% + \setuevalue{\e!type #1}{\noexpand\dotypebuffer [#1][def-\nofdefinedbuffers]}% \getparameters[\??bu#1][#2]% \fi} -\def\definebuffer +\unexpanded\def\definebuffer {\dodoubleempty\dodefinebuffer} + +\def\thebuffernumber#1% + {\csname\??bu#1\c!number\endcsname} -\def\getbuffer +\unexpanded\def\getbuffer {\dodoubleempty\dogetbuffer} \def\dogetbuffer[#1][#2]% @@ -191,18 
+190,18 @@ \dobuffer{16}{#2}\dogetbufferasis \getvalue{\??bu#1\c!after}} -\def\typebuffer +\unexpanded\def\typebuffer {\dodoubleempty\dotypebuffer} \def\doprocessbufferverbatim {\doinitializeverbatim - \ctxlua{buffers.type("\currentbuffer")}} + \ctxlua{buffers.type("\currentbuffer","\typingparameter\c!strip")}} \def\doprocessbufferlinesverbatim#1#2#3% {#2% % todo, set up numbers \doinitializeverbatim - \ctxlua{buffers.type("\currentbuffer")} + \ctxlua{buffers.type("\currentbuffer","\typingparameter\c!strip")} #3} \def\doifelsebuffer#1% @@ -276,20 +275,20 @@ \ifsecondargument \dosetbufferoffset{#1}% \doifelse{#2}\v!all - {\def\startbufferparagraph{\normalbufferparagraph{#1}}} - {\def\startbufferparagraph{\filterbufferparagraph{#1}{#2}}}% - \def\stopbufferparagraph{\dostopbufferparagraph{#1}}% + {\unexpanded\def\startbufferparagraph{\normalbufferparagraph{#1}}} + {\unexpanded\def\startbufferparagraph{\filterbufferparagraph{#1}{#2}}}% + \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{#1}}% \def\next{\getparagraphedbuffer[#1]}% \else \dosetbufferoffset\empty - \def\startbufferparagraph{\filterbufferparagraph{}{#1}}% - \def\stopbufferparagraph{\dostopbufferparagraph{}}% + \unexpanded\def\startbufferparagraph{\filterbufferparagraph{}{#1}}% + \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{}}% \def\next{\getparagraphedbuffer[]}% \fi \else \dosetbufferoffset\empty - \def\startbufferparagraph{\normalbufferparagraph{}}% - \def\stopbufferparagraph{\dostopbufferparagraph{}}% + \unexpanded\def\startbufferparagraph{\normalbufferparagraph{}}% + \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{}}% \def\next{\getparagraphedbuffer[]}% \fi \next} @@ -329,7 +328,7 @@ \def\fakebufferparagraph#1% {\bgroup - \def\stopbufferparagraph{\dostopbufferparagraph{#1}\egroup\egroup}% + \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{#1}\egroup\egroup}% \setbox\scratchbox\vbox\bgroup\dostartbufferparagraph{#1}} % definitions @@ -342,8 +341,15 @@ \c!after=] % only mkiv: +% +% \startbuffer[x] +% x +% \stopbuffer +% +% \savebuffer[x][temp.log] + +\unexpanded\def\savebuffer{\dodoubleempty\dosavebuffer} -\def\savebuffer{\dosingleempty\dosavebuffer} -\def\dosavebuffer[#1]{\ctxlua{buffers.save("#1")}} +\def\dosavebuffer[#1][#2]{\ctxlua{commands.savebuffer("#1","#2")}} \protect \endinput diff --git a/Master/texmf-dist/tex/context/base/buff-ver.mkii b/Master/texmf-dist/tex/context/base/buff-ver.mkii index 6b3817a7028..e7ad5474f37 100644 --- a/Master/texmf-dist/tex/context/base/buff-ver.mkii +++ b/Master/texmf-dist/tex/context/base/buff-ver.mkii @@ -144,7 +144,6 @@ \let\obeycharacters\setupprettytype \let\obeytabs\ignoretabs} - \def\setupcommonverbatim {\recatcodeuppercharactersfalse % obey regime / encoding % @@ -577,11 +576,13 @@ \veryraggedright} \def\ignorehyphens - {% \language\minusone % extra bonus, the \null should do the job too + {% \nohyphens % forgotten when no \par + \normallanguage\minusone % fails as the font redoes the language \def\obeyedspace {\hskip\interwordspace\relax}% better than spaceskip \def\controlspace{\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax}% \spaceskip.5em\relax} + \unexpanded\def\typ {\bgroup \let\@@tylines\v!hyphenated @@ -634,6 +635,11 @@ \unexpanded\def\mat{\groupedcommand{\setgroupedtype\$}{\$}} \unexpanded\def\dis{\groupedcommand{\setgroupedtype\$\$}{\$\$}} +\let\normaltexttex\tex +\let\normaltextarg\arg +\let\normaltextmat\mat +\let\normaltextdis\dis + %D \macros %D {starttyping} %D diff --git 
a/Master/texmf-dist/tex/context/base/buff-ver.mkiv b/Master/texmf-dist/tex/context/base/buff-ver.mkiv index 28e3e60d501..dacbdb7ac99 100644 --- a/Master/texmf-dist/tex/context/base/buff-ver.mkiv +++ b/Master/texmf-dist/tex/context/base/buff-ver.mkiv @@ -17,9 +17,9 @@ \unprotect -\ifx\startlinenumbering\undefined \let\startlinenumbering\relax \fi -\ifx\stoplinenumbering \undefined \let\stoplinenumbering\relax \fi -\ifx\setuplinenumbering\undefined \def\setuplinenumbering[#1]{} \fi +\ifdefined\startlinenumbering\else \let\startlinenumbering \relax \fi +\ifdefined\stoplinenumbering \else \let\stoplinenumbering \relax \fi +\ifdefined\setuplinenumbering\else \unexpanded\def\setuplinenumbering[#1]{} \fi % D \macros % D {iflinepar} @@ -66,16 +66,17 @@ \uppercasestring#2\to\asciib \setevalue{\??ty\??ty\asciia}{\asciib}} -\def\setupprettiesintype#1% +\unexpanded\def\setupprettiesintype#1% {\uppercasestring#1\to\ascii \edef\prettyidentifier{\executeifdefined{\??ty\??ty\ascii}{TEX}}% \begingroup % we can move this to lua - \lowercasestring \f!prettyprefix\prettyidentifier\to\filename - \doonlyonce\filename{\ctxloadluafile\filename\empty}% + % \lowercasestring \f!prettyprefix\prettyidentifier\to\filename + % \doonlyonce\filename{\ctxloadluafile\filename\empty}% + \ctxlua{buffers.loadvisualizer("\ascii")}% \endgroup} -\def\setupprettytype +\unexpanded\def\setupprettytype {\processingverbatimtrue % will move \ctxlua{buffers.visualizers.reset()}} @@ -84,7 +85,7 @@ \spaceskip\wd\scratchbox \xspaceskip\spaceskip} -\let\obeycharacters\relax +\let\obeycharacters\relax % tab mess can go \setvalue{\??tp:\c!lines:\v!yes }{\obeybreakpoints} \setvalue{\??tp:\c!lines:\v!hyphenated}{\obeyhyphens} @@ -93,59 +94,65 @@ %setvalue{\??tp:\c!empty:\v!all }{\obeyallemptylines} \setvalue{\??tp:\c!option:\v!none }{\let\obeycharacters\relax} -\setvalue{\??tp:\c!option:\v!color }{\setupprettiesintype{TEX}% - \let\obeycharacters\setupprettytype - \let\obeytabs\ignoretabs} +\setvalue{\??tp:\c!option:\empty }{\let\obeycharacters\relax} +\setvalue{\??tp:\c!option:\v!color }{\setupprettiesintype{\typingparameter\c!option}% + \let\obeycharacters\setupprettytype} \setvalue{\??tp:\c!option:\v!normal }{\let\obeycharacters\setupgroupedtype} -\setvalue{\??tp:\c!option:\v!commands }{\def\obeycharacters{\setupcommandsintype}% - \let\obeytabs\ignoretabs} -\setvalue{\??tp:\c!option:\v!slanted }{\let\obeycharacters\setupslantedtype - \let\obeytabs\ignoretabs} +\setvalue{\??tp:\c!option:\v!slanted }{\let\obeycharacters\setupslantedtype} \setvalue{\??tp:\c!option:\s!unknown }{\setupprettiesintype{\typingparameter\c!option}% - \let\obeycharacters\setupprettytype - \let\obeytabs\ignoretabs} + \let\obeycharacters\setupprettytype} +%setvalue{\??tp:\c!option:\v!commands }{\def\obeycharacters{\setupcommandsintype}} \def\dosetverbatimfont {\redoconvertfont\dosetfontattribute{\currenttypingclass\currenttyping}\c!style \normalnoligatures\font} -\def\setupcommonverbatim +\unexpanded\def\setupcommonverbatim {\let\prettyidentifier\s!default % -% \doifelse{\typingparameter\c!text}\v!yes -% \naturaltextexttrue -% \naturaltextextfalse \def\prettyidentifierfont{\typingparameter\c!icommand}% \def\prettyvariablefont {\typingparameter\c!vcommand}% \def\prettynaturalfont {\typingparameter\c!ccommand}% % - \doif{\typingparameter\c!space}\v!on - {\def\obeyspaces{\setcontrolspaces}}% - \doif{\typingparameter\c!page }\v!no - {\def\obeypages {\ignorepages}}% - % \ignorehyphens % default \getvalue{\??tp:\c!lines:\typingparameter\c!lines}% 
\getvalue{\??tp:\c!empty:\typingparameter\c!empty}% \getvalue{\??tp:\c!option:\ifcsname\??tp:\c!option:\typingparameter\c!option\endcsname\typingparameter\c!option\else\s!unknown\fi}% - \doifnumberelse{\typingparameter\c!tab} - {\def\obeytabs{\setfixedtabskips{\typingparameter\c!tab}}}% - \donothing \setupverbatimcolor} \newtoks \everyinitializeverbatim -\def\doinitializeverbatim +\def\ignorebeginofpretty [#1]{} +\def\ignoreendofpretty {} + +\def\doverbatimbop{\bgroup\beginofpretty} +\def\doverbatimeop{\endofpretty\egroup} +\def\doverbatimsop{\endofpretty\egroup\bgroup\beginofpretty} + +\let\noverbatimbop\ignorebeginofpretty +\let\noverbatimeop\ignoreendofpretty +\let\noverbatimsop\ignorebeginofpretty + +\def\doinitializeverbatim % todo: combine all in one call is faster {\ctxlua{buffers.visualizers.reset()}% - \def\obs{\obeyedspace}% + \doifelse{\typingparameter\c!space}\v!on + {\let\obs\fastcontrolspace}% + {\let\obs\specialobeyedspace}% + \edef\askedverbatimtab{\typingparameter\c!tab}% + \doifelse\askedverbatimtab\v!no + {\ctxlua{buffers.settablength(1)}} + {\doifnumberelse{\askedverbatimtab} + {\ctxlua{buffers.settablength(\askedverbatimtab)}} + {\ctxlua{buffers.settablength()}}}% \ctxlua{buffers.doifelsevisualizer("\prettyidentifier")} {\ctxlua{buffers.setvisualizer("\prettyidentifier")}% - \def\bop{\bgroup\beginofpretty}% - \def\eop{\endofpretty\egroup}% - \def\sop{\endofpretty\egroup\bgroup\beginofpretty}}% - {\let\bop\donothing - \let\eop\donothing - \let\sop\donothing}% + \let\bop\doverbatimbop + \let\eop\doverbatimeop + \let\sop\doverbatimsop}% + {\ctxlua{buffers.setvisualizer("\v!typing")}% or resetdefaultvisualizer + \let\bop\noverbatimbop + \let\eop\noverbatimeop + \let\sop\noverbatimsop}% \relax\the\everyinitializeverbatim\relax} \appendtoks @@ -163,14 +170,54 @@ % \typeTEX{\example---oeps}. this---ligates---again. % \type {\example---oeps}. this---ligates---again. 
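Space and tab rendering are now resolved per typing environment when verbatim is initialized; a sketch, assuming a TEX typing environment as in the escape examples below, with illustrative values:

    \setuptyping
      [TEX]
      [space=on, % spaces rendered with \fastcontrolspace
       tab=4]    % forwarded to buffers.settablength; tab=no maps to a length of 1

    \startTEX
    \def\demo  {spaces  and  tabs}
    \stopTEX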
-\def\setupcommandsintype % can also be \string\ - {\ctxlua{ - buffers.visualizers.enableescape = true - buffers.visualizers.escapetoken = \!!bs\typingparameter\c!escape\!!es - }% - \setevalue{\typingparameter\c!escape}{\typingparameter\c!escape}} +%D \startbuffer +%D \setuptyping[TEX][escape=yes] +%D +%D \startTEX +%D /BTEX\em sometex/ETEX +%D /BTEX\em sometex/ETEX \after +%D \before /BTEX\em sometex/ETEX +%D \before /BTEX\em sometex/ETEX \after +%D \before /BTEX\em sometex/ETEX \inbetween /BTEX\em sometex/ETEX \after +%D \before \after +%D \stopTEX +%D \stopbuffer +%D +%D \typebuffer \start \getbuffer \stop +%D +%D \startbuffer +%D \setuptyping[TEX][escape={[[,]]}] +%D +%D \startTEX +%D [[\em sometex]] +%D [[\em sometex]] \after +%D \before [[\em sometex]] +%D \before [[\em sometex]] \after +%D \before [[\em sometex]] \inbetween [[\em sometex]] \after +%D \before \after +%D \stopTEX +%D \stopbuffer +%D +%D \typebuffer \start \getbuffer \stop +%D +%D \startbuffer +%D \setuptyping[TEX][escape=//] +%D +%D \startTEX +%D //\em sometex +%D \before //\em sometex +%D \stopTEX +%D +%D \typebuffer \start \getbuffer \stop + +\unexpanded\def\setupcommandsintype + {\ctxlua{buffers.set_escape("\currenttyping",\!!bs\typingparameter\c!escape\!!es)}} + +\appendtoks + \setupcommandsintype +\to \everyinitializeverbatim -\def\setupslantedtype +\unexpanded\def\setupslantedtype {\slantedtypepermittedtrue} \ifx\setupprettytype \undefined \let\setupprettytype \relax \fi @@ -272,15 +319,6 @@ \catcode`\>=\@@other \futurelet\next\dodotype} -% \def\dodotype -% {\ifx\next\bgroup -% \@EA\dodotypeA -% \else\if\next<% -% \doifelse{\typingparameter\c!option}\v!none{\@EAEAEA\dodotypeB}{\@EAEAEA\dodotypeC}% -% \else -% \@EAEAEA\dodotypeD -% \fi\fi} - \def\dodotype {\ifx\next\bgroup \@EA\dodotypeA @@ -362,19 +400,15 @@ \setupcommonverbatim %\dosetverbatimfont %\setverbatimspaceskip - %\let\obeytabs \ignoretabs % probably not needed - %\let\obeylines\ignorelines % probably not needed - %\let\obeypages\ignorepages % probably not needed %\setupcopyverbatim % not needed - \setcatcodetable\vrbcatcodes - } + \setcatcodetable\vrbcatcodes} %D \macros %D {setuptype} %D %D Some characteristics of \type{\type} can be set up by: -\def\setuptype +\unexpanded\def\setuptype {\dodoubleempty\dosetuptype} \def\dosetuptype[#1][#2]% @@ -407,7 +441,7 @@ \veryraggedright} \def\ignorehyphens - {% \language\minusone % extra bonus, the \null should do the job too + {\language\minusone % extra bonus, the \null should do the job too \let\obeyedspace \specialobeyedspace \let\controlspace\specialcontrolspace \spaceskip.5em\relax} @@ -465,6 +499,19 @@ \unexpanded\def\mat{\groupedcommand{\setgroupedtype\$}{\$}} \unexpanded\def\dis{\groupedcommand{\setgroupedtype\$\$}{\$\$}} +\let\normaltexttex\tex +\let\normaltextarg\arg +\let\normaltextmat\mat +\let\normaltextdis\dis + +\def\astype + {\groupedcommand\dorawtype\relax} + +\def\dorawtype + {\let\currenttypingclass\??ty + \normalverbatimcolor % \verbatimcolor + \dosetverbatimfont} + %D \macros %D {starttyping} %D @@ -491,9 +538,9 @@ {\setxvalue{\currenttypingclass\currenttyping#1}{#2}} \setvalue{\??tp:\c!blank:\v!standard}{\ctxparskip} -\setvalue{\??tp:\c!blank:\v!small }{\blankokleinmaat} -\setvalue{\??tp:\c!blank:\v!medium }{\blankomiddelmaat} -\setvalue{\??tp:\c!blank:\v!big }{\blankogrootmaat} +\setvalue{\??tp:\c!blank:\v!small }{\smallskipamount} +\setvalue{\??tp:\c!blank:\v!medium }{\medskipamount} +\setvalue{\??tp:\c!blank:\v!big }{\bigskipamount} 
\setvalue{\??tp:\c!blank:\v!halfline}{.5\baselineskip} \setvalue{\??tp:\c!blank:\v!line }{\baselineskip} \setvalue{\??tp:\c!blank:\v!none }{\zeropoint} @@ -519,9 +566,11 @@ \else \doadaptleftskip{\typingparameter\c!margin}% \fi + % no symbolic blanks ! \edef\!!stringa{\executeifdefined{\??bo\typingparameter\c!blank}{\typingparameter\c!blank}}% \scratchskip\executeifdefined{\??tp:\c!blank:\!!stringa}\!!stringa\relax \ifgridsnapping + % this will be adapted \ifdim\scratchskip=.5\baselineskip\relax \edef\verbatimbaselineskip{\the\scratchskip}% new \else @@ -530,6 +579,7 @@ \else \edef\verbatimbaselineskip{\the\scratchskip}% \fi +\doifsomething{\typingparameter\c!align}{\setupalign[\typingparameter\c!align]}% \setupcommonverbatim} %D The basic display verbatim commands are defined in an @@ -582,22 +632,21 @@ \def\dotypefileverbatim {\doinitializeverbatim - \ctxlua{buffers.typefile("\readfilename")}} + \beginofverbatimlines + \ctxlua{buffers.typefile("\readfilename","\typingparameter\c!strip","\typingparameter\c!range")}% + \endofverbatimlines} \def\dotypefilelinesverbatim#1#2% - {#1% - \doinitializeverbatim - \ctxlua{buffers.typefile("\readfilename")}% - #2} + {#1\dotypefileverbatim#2} \unexpanded\def\dotypeblockverbatim#1#2% - {\dowithbuffer{_typing_}{#1}{#2} - {} - {\doinitializeverbatim - \beginofverbatimlines - \ctxlua{buffers.type("_typing_")}% - \endofverbatimlines - \csname#2\endcsname}} + {\dowithbuffer{_typing_}{#1}{#2} + {} + {\doinitializeverbatim + \beginofverbatimlines + \ctxlua{buffers.type("_typing_","\typingparameter\c!strip","\typingparameter\c!range")}% + \endofverbatimlines + \csname#2\endcsname}} \def\dododostarttyping[#1]% {\typingparameter\c!before @@ -612,8 +661,7 @@ {\stopverbatimcolor \stoppacked % includes \egroup \typingparameter\c!after - \egroup - \dochecknextindentation{\??tp#1}% + \normalexpanded{\egroup\checknextindentation[\typingparameter\c!indentnext]}% \dorechecknextindentation} %D Line numbering for files is combined with filtering, while @@ -630,7 +678,7 @@ %D ... %D \stopcode %D -%D \startcode[continue] +%D \startcode[start=continue] %D ... %D ... %D \stopcode @@ -640,6 +688,27 @@ %D \stopcode %D \stoptyping +%D Ranges: +%D +%D \starttyping +%D % label:start:one +%D +%D \def\MyMacro +%D {just an example} +%D +%D % label:stop:one +%D +%D \starttext +%D +%D % \typefile[file][range={3,6}]{whatever.tex} +%D +%D % \typefile[file][range={3,+2}]{whatever.tex} +%D +%D \typefile[file][range={label:start:one,label:stop:one}]{whatever.tex} +%D +%D \stoptext +%D \stoptyping + %D \macros %D {setuptyping} %D @@ -655,7 +724,7 @@ \getparameters[\??tp][#1]% \fi} -\def\setuptyping +\unexpanded\def\setuptyping {\dodoubleempty\dosetuptyping} %D \macros @@ -664,11 +733,11 @@ %D Specific inline verbatim commands can be defined with the %D following command. -\def\definetype +\unexpanded\def\definetype {\dodoubleempty\dodefinetype} \def\dodefinetype[#1][#2]% - {\unexpanded\setvalue{#1}{\dotype{#1}}% + {\setuvalue{#1}{\dotype{#1}}% \getparameters[\??ty#1][#2]} %D \macros @@ -688,7 +757,9 @@ %D %D The definitions default to the standard typing values. -\def\presettyping[#1][#2]% +% TODO: parent etc ! + +\def\presettyping[#1][#2]% brrr also use parent here {\copyparameters[\??tp#1][\??tp][\c!color,\c!style]% \getparameters [\??tp#1][#2]} @@ -697,7 +768,7 @@ \setvalue{\e!stop #1}{\dostoptyping {#1}}% \presettyping[#1][#2]} -\def\definetyping +\unexpanded\def\definetyping {\dodoubleempty\dodefinetyping} %D We can use some core color commands. 
These are faster than @@ -705,7 +776,7 @@ %D line basis. %D %D \starttyping -%D \def\setupverbatimcolor% +%D \unexpanded\def\setupverbatimcolor% %D {\edef\prettypalet{\prettyidentifier\typingparameter\c!palet}% %D \def\beginofpretty[##1]{\startcolormode{\prettypalet:##1}}% %D \def\endofpretty {\stopcolormode}} @@ -720,10 +791,7 @@ \def\normalstopverbatimcolor {\stopcolor} \def\normalverbatimcolor {\getvalue{\typingparameter\c!color}}% command ! -\def\ignorebeginofpretty [#1]{} -\def\ignoreendofpretty {} - -\def\setupnormalprettyverbatim +\unexpanded\def\setupnormalprettyverbatim {\edef\prettypalet{\prettyidentifier\prettypalet}% \let\beginofpretty \normalbeginofpretty \let\endofpretty \normalendofpretty @@ -731,7 +799,7 @@ \let\stopverbatimcolor \normalstopverbatimcolor \let\verbatimcolor \normalverbatimcolor} -\def\setupignoreprettyverbatim +\unexpanded\def\setupignoreprettyverbatim {\let\prettypalet \empty \let\beginofpretty \ignorebeginofpretty \let\endofpretty \ignoreendofpretty @@ -739,7 +807,7 @@ \let\stopverbatimcolor \normalstopverbatimcolor \let\verbatimcolor \normalverbatimcolor} -\def\setupverbatimcolor +\unexpanded\def\setupverbatimcolor {\edef\prettypalet{\typingparameter\c!palet}% \ifx\prettypalet\empty \setupignoreprettyverbatim @@ -799,33 +867,28 @@ \fi\fi} \def\dosetuptypelinenumbering#1% fuzzy - {\ifcsname\currenttypingclass\currenttyping\c!start\endcsname \else - \setuptyping[\currenttyping][\c!start=1,\c!stop=,\c!step=1,\c!nlines=]% - \fi - \setuptyping[\currenttyping][#1]% + {%\ifcsname\currenttypingclass\currenttyping\c!start\endcsname \else + % \setuptyping[\currenttyping][\c!start=1,\c!stop=,\c!step=1,\c!continue=\v!no,\c!nlines=]% + %\fi + \doifassignmentelse{#1}{\setuptyping[\currenttyping][#1]}\donothing \doifelse{\typingparameter\c!numbering}\v!file {% kind of special: filters lines ! - \setuplinenumbering[\c!method=\v!file]% + \setuplinenumbering[\currenttyping][\c!method=\v!file]% \donetrue} {\doifelse{\typingparameter\c!numbering}\v!line - {% \setuplinenumbering defaults start/step to 1/1, so we need - \doifinsetelse\v!continue{#1,\typingparameter\c!start} - {\scratchcounter0\typingparameter\c!n - \setxtypingparameter\c!start{\ifnum\scratchcounter=0 1\else\number\scratchcounter\fi}}% - {\doifnothing{\typingparameter\c!start}{\settypingparameter\c!start{1}}}% - \doifnothing{\typingparameter\c!step}{\settypingparameter\c!step{1}}% - \setuplinenumbering - [\c!method=\v!type, - \c!start=\typingparameter\c!start, - \c!stop=\typingparameter\c!stop, - \c!step=\typingparameter\c!step]% + {\doifinset\v!continue{#1}{\setuptyping[\currenttyping][\c!continue=\v!yes]}% fails: \settypingparameter\c!continue{\v!yes} \donetrue} {\donefalse}}% \ifdone - \ifx\startlinenumbering\undefined \let\startlinenumbering\relax \fi - \ifx\stoplinenumbering \undefined \let\stoplinenumbering \relax \fi - \def\beginofverbatimlines{\startlinenumbering}% - \def\endofverbatimlines {\stoplinenumbering\setxtypingparameter\c!n{\number\linenumber}}% + \edef\beginofverbatimlines{\noexpand\startlinenumbering + [\currenttyping]% + [\c!continue=\typingparameter\c!continue, + \c!method=\v!type, + \c!start=\typingparameter\c!start, + \c!stop=\typingparameter\c!stop, % ? 
+ \c!step=\typingparameter\c!step]% + }% + \def\endofverbatimlines{\stoplinenumbering}% \fi} \def\reporttypingerror#1% temp hack @@ -880,7 +943,8 @@ {\doiflocfileelse{#1} {\firstoftwoarguments} {\doifinputfileelse{#1} - {\def\readfilename{\pathplusfile\filepath{#1}}\firstoftwoarguments} % messy, looks wrong too +% {\def\readfilename{\pathplusfile\filepath{#1}}\firstoftwoarguments} % messy, looks wrong too + {\def\readfilename{#1}\firstoftwoarguments} % messy, looks wrong too {\secondoftwoarguments}}} \def\dodotypefile[#1][#2]#3% @@ -930,14 +994,16 @@ \c!evenmargin=\!!zeropoint, \c!oddmargin=\!!zeropoint, \c!blank=\v!line, - \c!escape=/, % beware \string\ , should also be accepted + \c!escape=, % yes | no | START,STOP BTEX,ETEX \c!numbering=\v!no, \c!lines=, + \c!range=, \c!empty=, \c!start=1, \c!stop=, \c!step=1, \c!continue=, + \c!strip=\v!no, % auto or number \c!nlines=] \definetyping[\v!typing] diff --git a/Master/texmf-dist/tex/context/base/bxml-apa.mkiv b/Master/texmf-dist/tex/context/base/bxml-apa.mkiv new file mode 100644 index 00000000000..5fc87e5efc6 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/bxml-apa.mkiv @@ -0,0 +1,613 @@ +%D \module +%D [ file=bxml-apa, +%D version=2010.05.14, % based on bibl-apa.tex +%D title=APA bibliography style, +%D subtitle=Publications, +%D author={Taco Hoekwater \& Hans Hagen}, +%D date=\currentdate, +%D copyright={Hans Hagen \& Taco Hoekwater}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\unprotect + +% helpers (todo) + +\let\maybeyear\firstofoneargument +\let\etalchar \firstofoneargument + +\setupbibtexcitationvariants + [author,year] + [\c!andtext={ and }, + \c!otherstext={ et al.}, + \c!pubsep={, }, + \c!lastpubsep={ and }, + \c!compress=\v!no, + \c!inbetween={ }, + \c!left={(}, + \c!right={)}] + +\setupbibtexcitationvariants + [authoryear] + [\c!andtext={ and }, + \c!otherstext={ et al.}, + \c!pubsep={, }, + \c!lastpubsep={ and }, + \c!compress=\v!yes, + \c!inbetween={ }, + \c!left={(}, + \c!right={)}] + +\setupbibtexcitationvariants + [authoryears] + [\c!andtext={ and }, + \c!otherstext={ et al.}, + \c!pubsep={, }, + \c!lastpubsep={ and }, + \c!compress=\v!yes, + \c!inbetween={, }, + \c!left={(}, + \c!right={)}] + +\setupbibtexcitationvariants + [key,serial,authornum,page,short,type,doi,url] + [\c!andtext={ and }, + \c!otherstext={ et al.}, + \c!pubsep={, }, + \c!lastpubsep={ and }, + \c!compress=\v!no, + \c!inbetween={ }, + \c!left={[}, + \c!right={]}] + +\setupbibtexcitationvariants + [num] + [\c!andtext={ and }, + \c!otherstext={ et al.}, + \c!pubsep={, }, + \c!lastpubsep={ and }, + \c!compress=\v!yes, + \c!inbetween={--}, + \c!left={[}, + \c!right={]}] + +\setupbibtexpublications + [\c!sorttype=, + \c!criterium=, + \c!refcommand=authoryears, + \c!numbering=\v!no, + \c!autohang=\v!no] + +\setupbibtexpublications + [\c!width=2em, % 24pt, + \c!artauthor=invertedshort, + \c!editor=invertedshort, + \c!author=invertedshort, + \c!namesep={, }, + \c!lastnamesep={ and }, + \c!finalnamesep={ and }, + \c!firstnamesep={, }, + \c!juniorsep={ }, + \c!vonsep={ }, + \c!surnamesep={, }, + \c!authoretallimit=5, + \c!editoretallimit=5, + \c!artauthoretallimit=5, + \c!authoretaldisplay=5, + \c!editoretaldisplay=5, + \c!artauthoretaldisplay=5, + \c!authoretaltext={ et al.}, + \c!editoretaltext={ et al.}, + \c!artauthoretaltext={ et al.}] + +% common + +\startxmlsetups bibtex:apa:common:wherefrom + \bibxmldoifelse {address} { + 
\getvariable{bibtex:temp}{left} + \bibxmldoifelse {country} { + \bibxmldoifelse {\getvariable{bibtex:temp}{label}} { + \bibxmlflush{address}\bibtexcomma\bibxmlflush{country}: \bibxmlflush{\getvariable{bibtex:temp}{label}} + } { + \bibxmlflush{address}\bibtexcomma\bibxmlflush{country} + } + } { + \bibxmldoifelse {\getvariable{bibtex:temp}{label}} { + \bibxmlflush{address}\bibtexcomma\bibxmlflush{\getvariable{bibtex:temp}{label}} + } { + \bibxmlflush{address} + } + } + \getvariable{bibtex:temp}{right} + } { + \bibxmldoifelse {country} { + \getvariable{bibtex:temp}{left} + \bibxmldoifelse {\getvariable{bibtex:temp}{label}} { + \bibxmlflush{country}: \bibxmlflush{\getvariable{bibtex:temp}{label}} + } { + \bibxmlflush{country} + } + \getvariable{bibtex:temp}{right} + } { + \bibxmldoifelse {\getvariable{bibtex:temp}{label}} { + \getvariable{bibtex:temp}{left} + \bibxmlflush{\getvariable{bibtex:temp}{label}} + \getvariable{bibtex:temp}{right} + } { + \getvariable{bibtex:temp}{otherwise} + } + } + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:publisher + \setvariables[bibtex:temp][label=publisher,left=,right=,otherwise=]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups +\startxmlsetups bibtex:apa:common:organization + \setvariables[bibtex:temp][label=organization,left=,right=,otherwise=]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups +\startxmlsetups bibtex:apa:common:school + \setvariables[bibtex:temp][label=school,left=,right=,otherwise=]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups +\startxmlsetups bibtex:apa:common:institution + \setvariables[bibtex:temp][label=institution,left=,right=,otherwise=]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:school:subsentence + \setvariables[bibtex:temp][label=school,left=\bibtexcomma,right=\bibtexperiod,otherwise=\bibtexperiod]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups +\startxmlsetups bibtex:apa:common:institution:subsentence + \setvariables[bibtex:temp][label=institution,left=\bibtexcomma,right=\bibtexperiod,otherwise=\bibtexperiod]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups +\startxmlsetups bibtex:apa:common:publisher:sentence + \setvariables[bibtex:temp][label=publisher,left=\bibtexspace,right=\bibtexperiod,otherwise=]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups +\startxmlsetups bibtex:apa:common:organization:sentence + \setvariables[bibtex:temp][label=organization,left=\bibtexspace,right=\bibtexperiod,otherwise=]\relax + \bibxmlsetup{bibtex:apa:common:wherefrom} +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:title-and-series + \bibxmldoif {title} { + \bibxmlflush{title} + \bibxmldoif {series} { + \bibtexlparent\bibxmlflush{series}\bibtexrparent + } + \bibtexperiod + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:title-it-and-series + \bibxmldoif {title} { + \bgroup\it\bibxmlflush{title}\/\egroup + \bibxmldoif {series} { + \bibtexlparent\bibxmlflush{series}\bibtexrparent + } + \bibtexperiod + } +\stopxmlsetups + +\disablemode[bibtex:apa:edited-book] + +\startxmlsetups bibtex:apa:common:author-and-year + \bibxmldoif {author} { + \bibxmlsetup{bibtex:format:author} + } + \bibxmldoif {year} { + \bibtexlparent\bibxmlflush{year}\bibtexrparent + } + \bibtexperiod +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:author-or-key-and-year + \bibxmldoifelse {author} { + \bibxmlsetup{bibtex:format:author} + } { + \bibxmldoif {key} { + 
\bibtexlbracket\bibxmlsetup{bibtex:format:key}\bibtexrbracket + } + } + \bibxmldoif {year} { + \bibtexlparent\bibxmlflush{year}\bibtexrparent + } + \bibtexperiod +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:author-editors-crossref-year + \bibxmldoif {author} { + \bibxmlsetup{bibtex:format:author} + } { + \bibxmldoifelse {editor} { + \enablemode[bibtex:apa:edited-book] + \xmlsetup{#1}{bibtex:format:editor} + \bibtexcomma\bibtexsingularplural{editor}{editors} + } { + % weird period + \bibxmldoif {crossref} { + \bibtexlbracket\bibxmlsetup{bibtex:format:crossref}\bibtexrbracket\bibtexperiod + } + } + } + \bibxmldoif {year} { + \bibtexlparent\bibxmlflush{year}\bibtexrparent + } + \bibtexperiod +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:editor-or-key-and-year + \bibxmldoifelse {editor} { + \enablemode[bibtex:apa:edited-book] + \xmlsetup{#1}{bibtex:format:editor} + \bibtexcomma\bibtexsingularplural{editor}{editors} + } { + \bibxmldoif {key} { + \bibtexlbracket\bibxmlsetup{bibtex:format:key}\bibtexrbracket + } + } + \bibtexspace + \bibxmldoif {year} { + \bibtexlparent\bibxmlflush{year}\bibtexrparent + } + \bibtexperiod +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:note + \bibxmldoif {note} { + \bibtexspace\bibxmlflush{note}\bibtexperiod + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:comment + \bibxmldoif {comment} { + \bibtexspace\bibxmlflush{comment}\bibtexperiod + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:pages:p + \bibxmldoif {pages} { + \bibtexspace\bibxmlflush{pages}\bibtexspace p\bibtexperiod + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:pages:pp + \bibxmldoif {pages} { + \bibtexspace\bibxmlflush{pages}\bibtexspace pp\bibtexperiod + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:pages:pages + \bibxmldoif {pages} { + \bibtexcomma pages~\bibxmlflush{pages} + } +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:edition:sentense + \bibxmldoif {edition} { + \bibtexspace\bibxmlflush{edition}\bibtexspace edition\bibtexperiod + } +\stopxmlsetups + +% check ewhen the next is used (no period) + +% \startxmlsetups bibtex:apa:common:edition +% \bibxmldoif {edition} { +% \bibtexspace\bibxmlflush{edition}\bibtexspace edition +% } +% \stopxmlsetups + +% we can share more, todo + +% specific + +\startxmlsetups bibtex:apa:article + \bibxmlsetup{bibtex:apa:common:author-or-key-and-year} + \bibxmldoif {title} { + \bibxmlflush{title}\bibtexperiod + } + \bibxmldoifelse {journal} { + \bgroup\it\bibxmlflush{journal}\/\egroup + } { + \bibxmldoif {crossref} { + In\bibtexspace\bibxmlflush{crossref} + } + } + \bibxmldoifelse {volume} { + \bibtexcomma\bgroup\it\bibxmlflush{volume}\/\egroup + \bibxmldoif {issue} { + \bibtexlparent\bibxmlflush{issue}\bibtexlparent + } + \bibxmldoif {pages} { + \bibtexcomma\bibxmlflush{pages} + } + \bibtexperiod + } { + \bibxmlsetup{bibtex:apa:common:pages:pp} + } + \bibxmlsetup{bibtex:apa:common:note} + \bibxmlsetup{bibtex:apa:common:comment} +\stopxmlsetups + +\startxmlsetups bibtex:apa:book + \bibxmlsetup{bibtex:apa:common:author-editors-crossref-year} + \bibxmldoif {title} { + \bgroup\it\bibxmlflush{title}\/\egroup + \doifmodeelse {bibtex:apa:edited-book} { + \bibxmldoifelse {volume} { + \bibtexspace Number\nonbreakablespace\bibxmlflush{volume} + \bibxmldoifelse {series} { + \bibtexspace in\nonbreakablespace\bibxmlflush{series}\bibtexperiod + } { + \bibxmldoifelse {crossref} { + \bibtexspace in\bibtexlbracket\bibxmlsetup{bibtex:format:crossref}\bibtexrbracket + } { + \bibtexperiod + } + } + } { + \bibxmldoif {series} 
{ + \bibtexspace\bibxmlflush{series} + } + \bibtexperiod + } + } { + \bibxmldoifelse {crossref} { + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter} + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibtexperiod + \bibxmldoif {volume} { + Volume\nonbreakablespace\bibxmlflush{volume}\bibtexspace of\nonbreakablespace + } + } { + \bibxmldoif {volume} { + \bibtexcomma volume\nonbreakablespace\bibxmlflush{volume} + \bibxmldoif {series} { + \bibtexspace of\nonbreakablespace\bgroup\it\bibxmlflush{series}\/\egroup + } + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter} + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibtexperiod + } + } + } + } + \bibxmlsetup{bibtex:apa:common:edition:sentence} + \bibxmlsetup{bibtex:apa:common:publisher:sentence} + \bibxmlsetup{bibtex:apa:common:pages:p}% twice? + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:inbook + \bibxmlsetup{bibtex:apa:common:author-editors-crossref-year} + \bibxmldoifelse {title} { + \bgroup\it\bibxmlflush{title}\/\egroup + } { + \doifmodeelse {bibtex:apa:edited-book} { + \bibxmldoifelse {volume} { + \bibtexspace number\nonbreakablespace\bibxmlflush{volume} + \bibxmldoifelse {series} { + \bibtexspace in\nonbreakablespace\bibxmlflush{series}\bibtexperiod + } { + \bibxmldoifelse {crossref} { + \bibtexspace in\bibtexlbracket\bibxmlsetup{bibtex:format:crossref}\bibtexrbracket + } { + \bibtexperiod + } + } + } { + \bibxmldoif {series} { + \bibtexspace\bibxmlflush{series}\bibtexperiod + } + } + } { + \bibxmldoifelse {crossref} { + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter} + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibxmldoif {volume} { + Volume\nonbreakablespace\bibxmlflush{volume}\bibtexspace of\nonbreakablespace + } + \bibxmldoif {crossref} { + \bibtexlbracket\bibxmlsetup{bibtex:format:crossref}\bibtexrbracket + } + } { + \bibxmldoif {volume} { + \bibtexcomma volume\nonbreakablespace\bibxmlflush{volume} + \bibxmldoif {series} { + \bibtexspace of\nonbreakablespace\bgroup\it\bibxmlflush{series}\/\egroup + } + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter} + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibtexperiod + } + } + } + } + \bibtexspace + \bibxmlsetup{bibtex:apa:common:edition:sentence} + \bibxmlsetup{bibtex:apa:common:publisher} + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:booklet + \bibxmlsetup{bibtex:apa:common:author-or-key-and-year} + \bibxmlsetup{bibtex:apa:common:title-it-and-series} + \bibxmlsetup{bibtex:apa:common:edition:sentence} + \bibxmlsetup{bibtex:apa:common:publication:sentence} + \bibxmlsetup{bibtex:apa:common:pages:p} + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:manual + \bibxmlsetup{bibtex:apa:common:author-or-key-and-year} + \bibxmlsetup{bibtex:apa:common:title-it-and-series} + \bibxmlsetup{bibtex:apa:common:edition:sentence} + \bibxmlsetup{bibtex:apa:common:organization:sentence} + \bibxmlsetup{bibtex:apa:common:pages:p} + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:incollection + \bibxmlsetup{bibtex:apa:common:author-and-year} + \bibxmldoif {arttitle} { + \bibxmlflush{arttitle}\bibtexperiod + } + In\bibtexspace + \bibxmldoifelse {title} { + \bibxmlsetup{bibtex:format:editors}\bibtexcomma + \bgroup\it\bibxmlflush{title}\/\egroup + \bibxmldoif {series} { + \bibxmldoif {volume} { + \bibtexcomma number\bibtexspace\bibxmlflush{volume}\bibtexspace in + } + \bibtexspace\bibxmlflush{series} + } + 
\bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter}\bibtexspace + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibxmldoif {edition} { + \bibtexspace\bibxmlflush{edition}\bibtexspace edition + } + \bibxmlsetup{bibtex:apa:common:publisher:sentence} + } { + \bibxmldoif {crossref} { + \bibtexlbracket\bibxmlsetup{bibtex:format:crossref}\bibtexrbracket + } + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter} + } + \bibtexspace + \bibxmlsetup{bibtex:apa:common:pages:pages} + } + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:inproceedings + \bibxmlsetup{bibtex:apa:common:author-and-year} + \bibxmldoif {arttitle} { + \bibxmlflush{arttitle}\bibtexperiod + } + In\bibtexspace + \bibxmldoifelse {title} { + \bibxmldoif {editors} { + \bibxmlflush{bibtex:apa:format:editors} + \bibtexcomma\bibtexsingularplural{editor}{editors}\bibtexcomma + } + \bgroup\it\bibxmlflush{title}\/\egroup + \bibxmldoif {series} { + \bibxmldoif {volume} { + \bibtexcomma number~\bibxmlflush{volume} in + } + \bibtexspace + \bibxmlflush{series} + } + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter}\bibtexspace + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibtexperiod + \bibxmlsetup{bibtex:apa:common:organization:sentence} + } { + \bibxmldoif {crossref} { + \bibtexlbracket\bibxmlsetup{bibtex:format:crossref}\bibtexrbracket + } + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter}\bibtexspace + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibtexperiod + } + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:proceedings + \bibxmlsetup{bibtex:apa:common:editor-or-key-and-year} + \bibxmldoif {title} { + \bgroup\it\bibxmlflush{title}\/\egroup + \bibxmldoif {volume} { + \bibtexcomma number\bibtexspace\bibxmlflush{volume}\bibtexspace in\bibtexspace + } + \bibxmldoif {chapter} { + \bibtexcomma\bibxmlflush{chapter}\bibtexspace + } + \bibxmlsetup{bibtex:apa:common:pages:pages} + \bibtexperiod + \bibxmlsetup{bibtex:apa:common:organization:sentence} + } + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:common:thesis + \bibxmlsetup{bibtex:apa:common:author-and-year} + \bibxmlsetup{bibtex:apa:common:title-it-and-series} + \bibxmldoifelse {type} { + \bibxmlflush{type} + } { + \getvariable{bibtex:temp}{label} + } + \bibxmlsetup{bibtex:apa:common:school:subsentence} + \bibxmlsetup{bibtex:apa:common:pages:p} + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:mastersthesis + \setvariables[bibtex:temp][label=Master's thesis] + \bibxmlsetup{bibtex:apa:common:thesis} +\stopxmlsetups + +\startxmlsetups bibtex:apa:phdthesis + \setvariables[bibtex:temp][label=PhD thesis] + \bibxmlsetup{bibtex:apa:common:thesis} +\stopxmlsetups + +\startxmlsetups bibtex:apa:techreport + \bibxmlsetup{bibtex:apa:common:author-and-year} + \bibxmlsetup{bibtex:apa:common:title-and-series} + \bibxmldoifelse {type} { + \bibxmlflush{type} + \bibxmldoif {volume} { + \bibtexspace\bibxmlflush{volume} + } + } { + \bibtexspace Technical Report + } + \bibxmlsetup{bibtex:apa:common:institution:subsentence} + \bibxmlsetup{bibtex:apa:common:pages:p} + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups bibtex:apa:misc + \bibxmlsetup{bibtex:apa:common:author-and-year} + \bibxmlsetup{bibtex:apa:common:title-and-series} + \bibxmlsetup{bibtex:apa:common:publisher:sentence} + \bibxmlsetup{bibtex:apa:common:pages:p} + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\startxmlsetups 
bibtex:apa:unpublished + \bibxmlsetup{bibtex:apa:common:author-and-year} + \bibxmlsetup{bibtex:apa:common:title-and-series} + \bibxmlsetup{bibtex:apa:common:pages:p} + \bibxmldoif {type} { + \bibtexlparent\bibxmlflush{type}\bibtexrparent + } + \bibxmlsetup{bibtex:apa:common:note} +\stopxmlsetups + +\protect \endinput diff --git a/Master/texmf-dist/tex/context/base/catc-ctx.tex b/Master/texmf-dist/tex/context/base/catc-ctx.tex index 83e802e7773..21e7d013641 100644 --- a/Master/texmf-dist/tex/context/base/catc-ctx.tex +++ b/Master/texmf-dist/tex/context/base/catc-ctx.tex @@ -14,13 +14,13 @@ %D We prefer to define relevant catcode tables in this file instead %D of everywhere around. -\ifx\ctxcatcodes \undefined \newcatcodetable \ctxcatcodes \fi -\ifx\mthcatcodes \undefined \newcatcodetable \mthcatcodes \fi % math, not used, too tricky -\ifx\xmlcatcodesn\undefined \newcatcodetable \xmlcatcodesn \fi % normal -\ifx\xmlcatcodese\undefined \newcatcodetable \xmlcatcodese \fi % entitle -\ifx\xmlcatcodesr\undefined \newcatcodetable \xmlcatcodesr \fi % reduce -\ifx\typcatcodesa\undefined \newcatcodetable \typcatcodesa \fi % { } -\ifx\typcatcodesb\undefined \newcatcodetable \typcatcodesb \fi % < > +\ifdefined \ctxcatcodes \else \newcatcodetable \ctxcatcodes \fi +\ifdefined \mthcatcodes \else \newcatcodetable \mthcatcodes \fi % math, not used, too tricky +\ifdefined \xmlcatcodesn \else \newcatcodetable \xmlcatcodesn \fi % normal +\ifdefined \xmlcatcodese \else \newcatcodetable \xmlcatcodese \fi % entitle +\ifdefined \xmlcatcodesr \else \newcatcodetable \xmlcatcodesr \fi % reduce +\ifdefined \typcatcodesa \else \newcatcodetable \typcatcodesa \fi % { } +\ifdefined \typcatcodesb \else \newcatcodetable \typcatcodesb \fi % < > \startcatcodetable \ctxcatcodes \catcode`\^^I = 10 @@ -202,7 +202,7 @@ \catcodetable \ctxcatcodes \let\defaultcatcodetable\ctxcatcodes -\let\xmlcatcodes \xmlcatcodesn +\let\xmlcatcodes \xmlcatcodesn % beware, in mkiv we use \notcatcodes \endinput diff --git a/Master/texmf-dist/tex/context/base/catc-def.tex b/Master/texmf-dist/tex/context/base/catc-def.tex index 0346f6dae7a..e80cfe12539 100644 --- a/Master/texmf-dist/tex/context/base/catc-def.tex +++ b/Master/texmf-dist/tex/context/base/catc-def.tex @@ -13,12 +13,12 @@ %D The following catcode tables are rather \CONTEXT\ independent. 
-\ifx\nilcatcodes \undefined \newcatcodetable \nilcatcodes \fi -\ifx\texcatcodes \undefined \newcatcodetable \texcatcodes \fi -\ifx\luacatcodes \undefined \newcatcodetable \luacatcodes \fi -\ifx\notcatcodes \undefined \newcatcodetable \notcatcodes \fi -\ifx\vrbcatcodes \undefined \newcatcodetable \vrbcatcodes \fi -\ifx\prtcatcodes \undefined \newcatcodetable \prtcatcodes \fi +\ifdefined\nilcatcodes \else \newcatcodetable \nilcatcodes \fi +\ifdefined\texcatcodes \else \newcatcodetable \texcatcodes \fi +\ifdefined\luacatcodes \else \newcatcodetable \luacatcodes \fi +\ifdefined\notcatcodes \else \newcatcodetable \notcatcodes \fi +\ifdefined\vrbcatcodes \else \newcatcodetable \vrbcatcodes \fi +\ifdefined\prtcatcodes \else \newcatcodetable \prtcatcodes \fi \startcatcodetable \nilcatcodes \catcode`\^^I = 10 % ascii tab is a blank space diff --git a/Master/texmf-dist/tex/context/base/catc-ini.lua b/Master/texmf-dist/tex/context/base/catc-ini.lua index e1558b459a0..2749f403c9c 100644 --- a/Master/texmf-dist/tex/context/base/catc-ini.lua +++ b/Master/texmf-dist/tex/context/base/catc-ini.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['catc-ini'] = { version = 1.001, - comment = "companion to catc-ini.tex", + comment = "companion to catc-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -17,7 +17,12 @@ storage.register("catcodes/names", catcodes.names, "catcodes.names") function catcodes.register(name,number) catcodes.numbers[name] = number - catcodes.names[number] = name + local cnn = catcodes.names[number] + if cnn then + cnn[#cnn+1] = name + else + catcodes.names[number] = { name } + end tex[name] = number end diff --git a/Master/texmf-dist/tex/context/base/catc-ini.mkiv b/Master/texmf-dist/tex/context/base/catc-ini.mkiv index 668a55d3bdd..269330a1bbd 100644 --- a/Master/texmf-dist/tex/context/base/catc-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/catc-ini.mkiv @@ -70,7 +70,7 @@ \catcode`\% \othercatcode \catcode127 \othercatcode} -\long\def\startcatcodetable#1#2\stopcatcodetable +\long\normalprotected\def\startcatcodetable#1#2\stopcatcodetable {\bgroup \catcodetable\scratchcatcodes \the\setdefaultcatcodes @@ -80,7 +80,7 @@ \newcatcodetable\dummycatcodes -\long\def\startextendcatcodetable#1#2\stopextendcatcodetable +\long\normalprotected\def\startextendcatcodetable#1#2\stopextendcatcodetable {\bgroup \catcodetable#1\relax \globaldefs\plusone @@ -90,7 +90,7 @@ % == % -% \long\def\startextendcatcodetable#1#2\stopextendcatcodetable +% \long\normalprotected\def\startextendcatcodetable#1#2\stopextendcatcodetable % {\bgroup % \scratchcounter\the\catcodetable % \catcodetable #1 #2 @@ -107,7 +107,9 @@ %D The next command can be defined in a cleaner way in the %D Mk IV file but we want to have a fast one with a minimal -%D chance for interference. +%D chance for interference. Do we still need this complex +%D mechanism? Future versions of \MKIV\ might only use +%D active characters for very special cases. 
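[editor's note, not part of the patch] The catc-ini.lua hunk above changes catcodes.register so that a catcode-table number maps back to every symbolic name registered for it, instead of only the last one. Below is a minimal standalone sketch of that bookkeeping, not the real module: the table number is arbitrary, the tex[] assignment of the original is stubbed out, and the alias registration only loosely mimics the catcodes.register("xmlcatcodes",...) call that char-ini.mkiv adds later in this patch.

    -- sketch of the list-valued catcodes.names bookkeeping (plain Lua, no TeX side)
    local catcodes = { numbers = { }, names = { } }

    local function register(name, number)
        catcodes.numbers[name] = number
        local cnn = catcodes.names[number]
        if cnn then
            cnn[#cnn+1] = name                -- extra name for an already known table
        else
            catcodes.names[number] = { name } -- first name for this table
        end
        -- the real function also does: tex[name] = number
    end

    register("xmlcatcodesn", 5) -- hypothetical table number
    register("xmlcatcodes",  5) -- alias for the same table keeps both names

    print(table.concat(catcodes.names[5], ", "))  --> xmlcatcodesn, xmlcatcodes

Before this change the second call would silently overwrite the first name; keeping a list lets an alias such as \xmlcatcodes (which catc-ctx.tex \lets to \xmlcatcodesn) share a table number without losing either name.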
\chardef\activehackcode=`\~ @@ -128,7 +130,7 @@ \def\letcatcodecommandc % only first time {\expandafter\gdef\csname CCL:\number\cctcountera:\number\cctcounterb\expandafter\endcsname\expandafter {\expandafter\let\csname CCC:\number\cctcountera:\number\cctcounterb\endcsname}% - \reinstatecatcodecommanda + \reinstatecatcodecommandua % unexpanded \csname CCL:\number\cctcountera:\number\cctcounterb\endcsname} % expandable def @@ -165,8 +167,8 @@ \def\uedcatcodecommandc % only first time {\expandafter\gdef\csname CCU:\number\cctcountera:\number\cctcounterb\expandafter\endcsname \expandafter##\expandafter1\expandafter - {\expandafter\unexpanded\expandafter\def\csname CCC:\number\cctcountera:\number\cctcounterb\endcsname{##1}}% - \reinstatecatcodecommanda + {\expandafter\normalprotected\expandafter\def\csname CCC:\number\cctcountera:\number\cctcounterb\endcsname{##1}}% + \reinstatecatcodecommandua % unexpanded \csname CCU:\number\cctcountera:\number\cctcounterb\endcsname} \def\reinstatecatcodecommand{\afterassignment\reinstatecatcodecommanda\cctcounterb} @@ -178,6 +180,13 @@ \uppercase{\xdef~{\noexpand\catcodecommand{\number\cctcounterb}}}% \egroup} +\def\reinstatecatcodecommandua % can be used when a direct definition has been done + {\bgroup % and the selector has been lost + \uccode\activehackcode\cctcounterb + \catcode\uccode\activehackcode\activecatcode + \uppercase{\normalprotected\xdef~{\noexpand\catcodecommand{\number\cctcounterb}}}% + \egroup} + \chardef\defaultcatcodetable\zerocount \def\catcodecommand#1% @@ -270,5 +279,39 @@ % \tracecatcodetables +%D Only in \MKIV\ (to be used when crossing pages with changed catcodes +%D in the current vector): +%D +%D \starttyping +%D \normalprotected\def\startcrap +%D {\bgroup +%D \pushcatcodes +%D \whitespace +%D \obeylines +%D \activatespacehandler\v!yes +%D \strut} +%D +%D \normalprotected\def\stopcrap +%D {\popcatcodes +%D \egroup} +%D \stoptyping + +\newcount\catcodetablelevel + +\def\pushcatcodes + {\bgroup + \global\advance\catcodetablelevel\plusone + \ifcsname @@ccf:\number\catcodetablelevel\endcsname \else + \global\advance\cctdefcounter\plusone + \expandafter\global\expandafter\chardef\csname @@ccf:\number\catcodetablelevel\endcsname\cctdefcounter + \fi + \catcodetable\ctxcatcodes + \expandafter\savecatcodetable\csname @@ccf:\number\catcodetablelevel\endcsname + \egroup + \expandafter\catcodetable\csname @@ccf:\number\catcodetablelevel\endcsname} + +\def\popcatcodes + {\global\advance\catcodetablelevel\minusone} + \endinput diff --git a/Master/texmf-dist/tex/context/base/char-act.mkiv b/Master/texmf-dist/tex/context/base/char-act.mkiv index 34358784a2a..2dc7823f0bf 100644 --- a/Master/texmf-dist/tex/context/base/char-act.mkiv +++ b/Master/texmf-dist/tex/context/base/char-act.mkiv @@ -102,6 +102,19 @@ \prettynaturalfont{\scantextokens\expandafter{\ascii}\ifhmode\unskip\fi}% \egroup} +%D What-a-mess: + +% \def\normalspacecodes +% {\catcode`\^^I = 10 +% \catcode`\^^M = 5 +% \catcode`\^^L = 5 +% \catcode`\ = 10 +% \catcode`\^^Z = 9 } + +% \appendtoks +% \normalspacecodes +% \to \everybeforeoutput + \endinput \protect % obsolete (old hack for idris) diff --git a/Master/texmf-dist/tex/context/base/char-cmp.lua b/Master/texmf-dist/tex/context/base/char-cmp.lua index bd33604996a..c7deb79010f 100644 --- a/Master/texmf-dist/tex/context/base/char-cmp.lua +++ b/Master/texmf-dist/tex/context/base/char-cmp.lua @@ -1,12 +1,13 @@ if not modules then modules = { } end modules ['char-cmp'] = { version = 1.001, - comment = "companion to char-ini.tex", + 
comment = "companion to char-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } local utf = unicode.utf8 +local unpack = unpack or table.unpack characters = characters or { } characters.uncomposed = characters.uncomposed or { } diff --git a/Master/texmf-dist/tex/context/base/char-def.lua b/Master/texmf-dist/tex/context/base/char-def.lua index 665572ee81f..b7abee0fbf6 100644 --- a/Master/texmf-dist/tex/context/base/char-def.lua +++ b/Master/texmf-dist/tex/context/base/char-def.lua @@ -1,5 +1,6 @@ if not modules then modules = { } end modules ['char-def'] = { version = 1.001, + comment = "companion to char-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -586,6 +587,7 @@ characters.data={ adobename="at", category="po", cjkwd="na", + contextname="textat", description="COMMERCIAL AT", direction="on", linebreak="al", @@ -885,7 +887,7 @@ characters.data={ direction="on", linebreak="op", mathclass="open", - mathname="lbracket", + mathname="lbrack", mirror=0x005D, unicodeslot=0x005B, }, @@ -909,7 +911,7 @@ characters.data={ direction="on", linebreak="cl", mathclass="close", - mathname="rbracket", + mathname="rbrack", mirror=0x005B, unicodeslot=0x005D, }, @@ -8710,6 +8712,8 @@ characters.data={ description="GREEK CAPITAL LETTER ALPHA", direction="l", lccode=0x03B1, + mathclass="variable", + mathname="Alpha", linebreak="al", unicodeslot=0x0391, }, @@ -8721,6 +8725,8 @@ characters.data={ description="GREEK CAPITAL LETTER BETA", direction="l", lccode=0x03B2, + mathclass="variable", + mathname="Beta", linebreak="al", unicodeslot=0x0392, }, @@ -8731,6 +8737,8 @@ characters.data={ contextname="greekGamma", description="GREEK CAPITAL LETTER GAMMA", direction="l", + mathclass="variable", + mathname="Gamma", lccode=0x03B3, linebreak="al", unicodeslot=0x0393, @@ -8741,6 +8749,8 @@ characters.data={ contextname="greekDelta", description="GREEK CAPITAL LETTER DELTA", direction="l", + mathclass="variable", + mathname="Delta", lccode=0x03B4, linebreak="al", unicodeslot=0x0394, @@ -8752,6 +8762,8 @@ characters.data={ contextname="greekEpsilon", description="GREEK CAPITAL LETTER EPSILON", direction="l", + mathclass="variable", + mathname="Epsilon", lccode=0x03B5, linebreak="al", unicodeslot=0x0395, @@ -8763,6 +8775,8 @@ characters.data={ contextname="greekZeta", description="GREEK CAPITAL LETTER ZETA", direction="l", + mathclass="variable", + mathname="Zeta", lccode=0x03B6, linebreak="al", unicodeslot=0x0396, @@ -8774,6 +8788,8 @@ characters.data={ contextname="greekEta", description="GREEK CAPITAL LETTER ETA", direction="l", + mathclass="variable", + mathname="Eta", lccode=0x03B7, linebreak="al", unicodeslot=0x0397, @@ -8785,6 +8801,8 @@ characters.data={ contextname="greekTheta", description="GREEK CAPITAL LETTER THETA", direction="l", + mathclass="variable", + mathname="Theta", lccode=0x03B8, linebreak="al", unicodeslot=0x0398, @@ -8796,6 +8814,8 @@ characters.data={ contextname="greekIota", description="GREEK CAPITAL LETTER IOTA", direction="l", + mathclass="variable", + mathname="Iota", lccode=0x03B9, linebreak="al", unicodeslot=0x0399, @@ -8807,6 +8827,8 @@ characters.data={ contextname="greekKappa", description="GREEK CAPITAL LETTER KAPPA", direction="l", + mathclass="variable", + mathname="Kappa", lccode=0x03BA, linebreak="al", unicodeslot=0x039A, @@ -8818,6 +8840,8 @@ characters.data={ 
contextname="greekLambda", description="GREEK CAPITAL LETTER LAMDA", direction="l", + mathclass="variable", + mathname="Lambda", lccode=0x03BB, linebreak="al", unicodeslot=0x039B, @@ -8829,6 +8853,8 @@ characters.data={ contextname="greekMu", description="GREEK CAPITAL LETTER MU", direction="l", + mathclass="variable", + mathname="Mu", lccode=0x03BC, linebreak="al", unicodeslot=0x039C, @@ -8840,6 +8866,8 @@ characters.data={ contextname="greekNu", description="GREEK CAPITAL LETTER NU", direction="l", + mathclass="variable", + mathname="Nu", lccode=0x03BD, linebreak="al", unicodeslot=0x039D, @@ -8851,6 +8879,8 @@ characters.data={ contextname="greekXi", description="GREEK CAPITAL LETTER XI", direction="l", + mathclass="variable", + mathname="Xi", lccode=0x03BE, linebreak="al", unicodeslot=0x039E, @@ -8862,6 +8892,8 @@ characters.data={ contextname="greekOmicron", description="GREEK CAPITAL LETTER OMICRON", direction="l", + mathclass="variable", + mathname="Omicron", lccode=0x03BF, linebreak="al", unicodeslot=0x039F, @@ -8873,6 +8905,8 @@ characters.data={ contextname="greekPi", description="GREEK CAPITAL LETTER PI", direction="l", + mathclass="variable", + mathname="Pi", lccode=0x03C0, linebreak="al", unicodeslot=0x03A0, @@ -8884,6 +8918,8 @@ characters.data={ contextname="greekRho", description="GREEK CAPITAL LETTER RHO", direction="l", + mathclass="variable", + mathname="Rho", lccode=0x03C1, linebreak="al", unicodeslot=0x03A1, @@ -8895,6 +8931,8 @@ characters.data={ contextname="greekSigma", description="GREEK CAPITAL LETTER SIGMA", direction="l", + mathclass="variable", + mathname="Sigma", lccode=0x03C3, linebreak="al", unicodeslot=0x03A3, @@ -8906,6 +8944,8 @@ characters.data={ contextname="greekTau", description="GREEK CAPITAL LETTER TAU", direction="l", + mathclass="variable", + mathname="Tau", lccode=0x03C4, linebreak="al", unicodeslot=0x03A4, @@ -8917,6 +8957,8 @@ characters.data={ contextname="greekUpsilon", description="GREEK CAPITAL LETTER UPSILON", direction="l", + mathclass="variable", + mathname="Upsilon", lccode=0x03C5, linebreak="al", unicodeslot=0x03A5, @@ -8928,6 +8970,8 @@ characters.data={ contextname="greekPhi", description="GREEK CAPITAL LETTER PHI", direction="l", + mathclass="variable", + mathname="Phi", lccode=0x03C6, linebreak="al", unicodeslot=0x03A6, @@ -8939,6 +8983,8 @@ characters.data={ contextname="greekChi", description="GREEK CAPITAL LETTER CHI", direction="l", + mathclass="variable", + mathname="Chi", lccode=0x03C7, linebreak="al", unicodeslot=0x03A7, @@ -8950,6 +8996,8 @@ characters.data={ contextname="greekPsi", description="GREEK CAPITAL LETTER PSI", direction="l", + mathclass="variable", + mathname="Psi", lccode=0x03C8, linebreak="al", unicodeslot=0x03A8, @@ -8960,6 +9008,8 @@ characters.data={ contextname="greekOmega", description="GREEK CAPITAL LETTER OMEGA", direction="l", + mathclass="variable", + mathname="Omega", lccode=0x03C9, linebreak="al", unicodeslot=0x03A9, @@ -9055,6 +9105,8 @@ characters.data={ description="GREEK SMALL LETTER ALPHA", direction="l", linebreak="al", + mathclass="variable", + mathname="alpha", uccode=0x0391, unicodeslot=0x03B1, }, @@ -9066,6 +9118,8 @@ characters.data={ description="GREEK SMALL LETTER BETA", direction="l", linebreak="al", + mathclass="variable", + mathname="beta", uccode=0x0392, unicodeslot=0x03B2, }, @@ -9077,6 +9131,8 @@ characters.data={ description="GREEK SMALL LETTER GAMMA", direction="l", linebreak="al", + mathclass="variable", + mathname="gamma", uccode=0x0393, unicodeslot=0x03B3, }, @@ -9088,6 
+9144,8 @@ characters.data={ description="GREEK SMALL LETTER DELTA", direction="l", linebreak="al", + mathclass="variable", + mathname="delta", uccode=0x0394, unicodeslot=0x03B4, }, @@ -9099,6 +9157,8 @@ characters.data={ description="GREEK SMALL LETTER EPSILON", direction="l", linebreak="al", + mathclass="variable", + mathname="varepsilon", uccode=0x0395, unicodeslot=0x03B5, }, @@ -9110,6 +9170,8 @@ characters.data={ description="GREEK SMALL LETTER ZETA", direction="l", linebreak="al", + mathclass="variable", + mathname="zeta", uccode=0x0396, unicodeslot=0x03B6, }, @@ -9121,6 +9183,8 @@ characters.data={ description="GREEK SMALL LETTER ETA", direction="l", linebreak="al", + mathclass="variable", + mathname="eta", uccode=0x0397, unicodeslot=0x03B7, }, @@ -9132,6 +9196,8 @@ characters.data={ description="GREEK SMALL LETTER THETA", direction="l", linebreak="al", + mathclass="variable", + mathname="theta", uccode=0x0398, unicodeslot=0x03B8, }, @@ -9143,6 +9209,8 @@ characters.data={ description="GREEK SMALL LETTER IOTA", direction="l", linebreak="al", + mathclass="variable", + mathname="iota", uccode=0x0399, unicodeslot=0x03B9, }, @@ -9154,6 +9222,8 @@ characters.data={ description="GREEK SMALL LETTER KAPPA", direction="l", linebreak="al", + mathclass="variable", + mathname="kappa", uccode=0x039A, unicodeslot=0x03BA, }, @@ -9165,6 +9235,8 @@ characters.data={ description="GREEK SMALL LETTER LAMDA", direction="l", linebreak="al", + mathclass="variable", + mathname="lambda", uccode=0x039B, unicodeslot=0x03BB, }, @@ -9175,6 +9247,8 @@ characters.data={ description="GREEK SMALL LETTER MU", direction="l", linebreak="al", + mathclass="variable", + mathname="mu", uccode=0x039C, unicodeslot=0x03BC, }, @@ -9186,6 +9260,8 @@ characters.data={ description="GREEK SMALL LETTER NU", direction="l", linebreak="al", + mathclass="variable", + mathname="nu", uccode=0x039D, unicodeslot=0x03BD, }, @@ -9197,6 +9273,8 @@ characters.data={ description="GREEK SMALL LETTER XI", direction="l", linebreak="al", + mathclass="variable", + mathname="xi", uccode=0x039E, unicodeslot=0x03BE, }, @@ -9208,6 +9286,8 @@ characters.data={ description="GREEK SMALL LETTER OMICRON", direction="l", linebreak="al", + mathclass="variable", + mathname="omicron", uccode=0x039F, unicodeslot=0x03BF, }, @@ -9219,6 +9299,8 @@ characters.data={ description="GREEK SMALL LETTER PI", direction="l", linebreak="al", + mathclass="variable", + mathname="pi", uccode=0x03A0, unicodeslot=0x03C0, }, @@ -9230,6 +9312,8 @@ characters.data={ description="GREEK SMALL LETTER RHO", direction="l", linebreak="al", + mathclass="variable", + mathname="rho", uccode=0x03A1, unicodeslot=0x03C1, }, @@ -9240,6 +9324,8 @@ characters.data={ description="GREEK SMALL LETTER FINAL SIGMA", direction="l", linebreak="al", + mathclass="variable", + mathname="varsigma", uccode=0x03A3, unicodeslot=0x03C2, }, @@ -9251,6 +9337,8 @@ characters.data={ description="GREEK SMALL LETTER SIGMA", direction="l", linebreak="al", + mathclass="variable", + mathname="sigma", uccode=0x03A3, unicodeslot=0x03C3, }, @@ -9262,6 +9350,8 @@ characters.data={ description="GREEK SMALL LETTER TAU", direction="l", linebreak="al", + mathclass="variable", + mathname="tau", uccode=0x03A4, unicodeslot=0x03C4, }, @@ -9273,6 +9363,8 @@ characters.data={ description="GREEK SMALL LETTER UPSILON", direction="l", linebreak="al", + mathclass="variable", + mathname="upsilon", uccode=0x03A5, unicodeslot=0x03C5, }, @@ -9284,6 +9376,8 @@ characters.data={ description="GREEK SMALL LETTER PHI", direction="l", 
linebreak="al", + mathclass="variable", + mathname="varphi", uccode=0x03A6, unicodeslot=0x03C6, }, @@ -9295,6 +9389,8 @@ characters.data={ description="GREEK SMALL LETTER CHI", direction="l", linebreak="al", + mathclass="variable", + mathname="chi", uccode=0x03A7, unicodeslot=0x03C7, }, @@ -9306,6 +9402,8 @@ characters.data={ description="GREEK SMALL LETTER PSI", direction="l", linebreak="al", + mathclass="variable", + mathname="psi", uccode=0x03A8, unicodeslot=0x03C8, }, @@ -9317,6 +9415,8 @@ characters.data={ description="GREEK SMALL LETTER OMEGA", direction="l", linebreak="al", + mathclass="variable", + mathname="omega", uccode=0x03A9, unicodeslot=0x03C9, }, @@ -9396,6 +9496,8 @@ characters.data={ description="GREEK THETA SYMBOL", direction="l", linebreak="al", + mathclass="variable", + mathname="varTheta", specials={ "compat", 0x03B8 }, uccode=0x0398, unicodeslot=0x03D1, @@ -9433,6 +9535,8 @@ characters.data={ direction="l", linebreak="al", specials={ "compat", 0x03C6 }, + mathclass="variable", + mathname="phi", uccode=0x03A6, unicodeslot=0x03D5, }, @@ -9443,6 +9547,8 @@ characters.data={ description="GREEK PI SYMBOL", direction="l", linebreak="al", + mathclass="variable", + mathname="varpi", specials={ "compat", 0x03C0 }, uccode=0x03A0, unicodeslot=0x03D6, @@ -9706,6 +9812,8 @@ characters.data={ description="GREEK LUNATE EPSILON SYMBOL", direction="l", linebreak="al", + mathclass="variable", + mathname="epsilon", specials={ "compat", 0x03B5 }, uccode=0x0395, unicodeslot=0x03F5, @@ -13907,6 +14015,7 @@ characters.data={ category="cf", description="ARABIC NUMBER SIGN", direction="an", + visible="yes", linebreak="al", unicodeslot=0x0600, }, @@ -13914,6 +14023,7 @@ characters.data={ category="cf", description="ARABIC SIGN SANAH", direction="an", + visible="yes", linebreak="al", unicodeslot=0x0601, }, @@ -13921,6 +14031,7 @@ characters.data={ category="cf", description="ARABIC FOOTNOTE MARKER", direction="an", + visible="yes", linebreak="al", unicodeslot=0x0602, }, @@ -13928,6 +14039,7 @@ characters.data={ category="cf", description="ARABIC SIGN SAFHA", direction="an", + visible="yes", linebreak="al", unicodeslot=0x0603, }, @@ -15448,6 +15560,7 @@ characters.data={ category="cf", description="ARABIC END OF AYAH", direction="an", + visible="yes", linebreak="al", unicodeslot=0x06DD, }, @@ -15797,6 +15910,7 @@ characters.data={ category="cf", description="SYRIAC ABBREVIATION MARK", direction="bn", + visible="yes", linebreak="al", unicodeslot=0x070F, }, @@ -48736,6 +48850,7 @@ characters.data={ [0x200C]={ adobename="afii61664", category="cf", + contextname="zwnj", description="ZERO WIDTH NON-JOINER", direction="bn", linebreak="cm", @@ -48744,6 +48859,7 @@ characters.data={ [0x200D]={ adobename="afii301", category="cf", + contextname="zwj", description="ZERO WIDTH JOINER", direction="bn", linebreak="cm", @@ -49444,7 +49560,7 @@ characters.data={ [0x2061]={ category="cf", comment="maybe: nulloperator", - contextcommand="relax", + contextname="relax", description="FUNCTION APPLICATION", direction="bn", linebreak="al", @@ -50038,6 +50154,8 @@ characters.data={ description="COMBINING THREE DOTS ABOVE", direction="nsm", linebreak="cm", + mathclass="accent", + mathname="dddot", unicodeslot=0x20DB, }, [0x20DC]={ @@ -59877,6 +59995,8 @@ characters.data={ description="BLACK RIGHT-POINTING TRIANGLE", direction="on", linebreak="ai", + mathclass="bin", + mathname="blacktriangleright", unicodeslot=0x25B6, }, [0x25B7]={ @@ -59892,6 +60012,8 @@ characters.data={ description="BLACK RIGHT-POINTING SMALL 
TRIANGLE", direction="on", linebreak="al", + mathclass="bin", + mathname="blacktriangleleft", unicodeslot=0x25B8, }, [0x25B9]={ @@ -108416,6 +108538,7 @@ characters.data={ category="cf", description="INTERLINEAR ANNOTATION ANCHOR", direction="on", + visible="yes", linebreak="cm", unicodeslot=0xFFF9, }, @@ -108423,6 +108546,7 @@ characters.data={ category="cf", description="INTERLINEAR ANNOTATION SEPARATOR", direction="on", + visible="yes", linebreak="cm", unicodeslot=0xFFFA, }, @@ -108430,6 +108554,7 @@ characters.data={ category="cf", description="INTERLINEAR ANNOTATION TERMINATOR", direction="on", + visible="yes", linebreak="cm", unicodeslot=0xFFFB, }, @@ -126093,6 +126218,7 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL V", direction="l", linebreak="al", +--mathclass="variable", specials={ "font", 0x0056 }, unicodeslot=0x1D449, }, @@ -131209,8 +131335,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL ALPHA", direction="l", linebreak="al", - mathclass="variable", - mathname="Alpha", specials={ "font", 0x0391 }, unicodeslot=0x1D6E2, }, @@ -131219,8 +131343,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL BETA", direction="l", linebreak="al", - mathclass="variable", - mathname="Beta", specials={ "font", 0x0392 }, unicodeslot=0x1D6E3, }, @@ -131229,8 +131351,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL GAMMA", direction="l", linebreak="al", - mathclass="variable", - mathname="Gamma", specials={ "font", 0x0393 }, unicodeslot=0x1D6E4, }, @@ -131239,8 +131359,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL DELTA", direction="l", linebreak="al", - mathclass="variable", - mathname="Delta", specials={ "font", 0x0394 }, unicodeslot=0x1D6E5, }, @@ -131249,8 +131367,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL EPSILON", direction="l", linebreak="al", - mathclass="variable", - mathname="Epsilon", specials={ "font", 0x0395 }, unicodeslot=0x1D6E6, }, @@ -131259,8 +131375,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL ZETA", direction="l", linebreak="al", - mathclass="variable", - mathname="Zeta", specials={ "font", 0x0396 }, unicodeslot=0x1D6E7, }, @@ -131269,8 +131383,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL ETA", direction="l", linebreak="al", - mathclass="variable", - mathname="Eta", specials={ "font", 0x0397 }, unicodeslot=0x1D6E8, }, @@ -131279,8 +131391,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL THETA", direction="l", linebreak="al", - mathclass="variable", - mathname="Theta", specials={ "font", 0x0398 }, unicodeslot=0x1D6E9, }, @@ -131289,8 +131399,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL IOTA", direction="l", linebreak="al", - mathclass="variable", - mathname="Iota", specials={ "font", 0x0399 }, unicodeslot=0x1D6EA, }, @@ -131299,8 +131407,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL KAPPA", direction="l", linebreak="al", - mathclass="variable", - mathname="Kappa", specials={ "font", 0x039A }, unicodeslot=0x1D6EB, }, @@ -131309,8 +131415,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL LAMDA", direction="l", linebreak="al", - mathclass="variable", - mathname="Lambda", specials={ "font", 0x039B }, unicodeslot=0x1D6EC, }, @@ -131319,8 +131423,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL MU", direction="l", linebreak="al", - mathclass="variable", - mathname="Mu", specials={ "font", 0x039C }, unicodeslot=0x1D6ED, }, @@ -131329,8 +131431,6 @@ characters.data={ 
description="MATHEMATICAL ITALIC CAPITAL NU", direction="l", linebreak="al", - mathclass="variable", - mathname="Nu", specials={ "font", 0x039D }, unicodeslot=0x1D6EE, }, @@ -131339,8 +131439,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL XI", direction="l", linebreak="al", - mathclass="variable", - mathname="Xi", specials={ "font", 0x039E }, unicodeslot=0x1D6EF, }, @@ -131349,8 +131447,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL OMICRON", direction="l", linebreak="al", - mathclass="variable", - mathname="Omicron", specials={ "font", 0x039F }, unicodeslot=0x1D6F0, }, @@ -131359,8 +131455,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL PI", direction="l", linebreak="al", - mathclass="variable", - mathname="Pi", specials={ "font", 0x03A0 }, unicodeslot=0x1D6F1, }, @@ -131369,8 +131463,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL RHO", direction="l", linebreak="al", - mathclass="variable", - mathname="Rho", specials={ "font", 0x03A1 }, unicodeslot=0x1D6F2, }, @@ -131379,8 +131471,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL THETA SYMBOL", direction="l", linebreak="al", - mathclass="variable", - mathname="varTheta", specials={ "font", 0x03F4 }, unicodeslot=0x1D6F3, }, @@ -131389,8 +131479,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL SIGMA", direction="l", linebreak="al", - mathclass="variable", - mathname="Sigma", specials={ "font", 0x03A3 }, unicodeslot=0x1D6F4, }, @@ -131399,8 +131487,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL TAU", direction="l", linebreak="al", - mathclass="variable", - mathname="Tau", specials={ "font", 0x03A4 }, unicodeslot=0x1D6F5, }, @@ -131409,8 +131495,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL UPSILON", direction="l", linebreak="al", - mathclass="variable", - mathname="Upsilon", specials={ "font", 0x03A5 }, unicodeslot=0x1D6F6, }, @@ -131419,8 +131503,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL PHI", direction="l", linebreak="al", - mathclass="variable", - mathname="Phi", specials={ "font", 0x03A6 }, unicodeslot=0x1D6F7, }, @@ -131429,8 +131511,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL CHI", direction="l", linebreak="al", - mathclass="variable", - mathname="Chi", specials={ "font", 0x03A7 }, unicodeslot=0x1D6F8, }, @@ -131439,8 +131519,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL PSI", direction="l", linebreak="al", - mathclass="variable", - mathname="Psi", specials={ "font", 0x03A8 }, unicodeslot=0x1D6F9, }, @@ -131449,8 +131527,6 @@ characters.data={ description="MATHEMATICAL ITALIC CAPITAL OMEGA", direction="l", linebreak="al", - mathclass="variable", - mathname="Omega", specials={ "font", 0x03A9 }, unicodeslot=0x1D6FA, }, @@ -131469,8 +131545,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL ALPHA", direction="l", linebreak="al", - mathclass="default", - mathname="alpha", specials={ "font", 0x03B1 }, unicodeslot=0x1D6FC, }, @@ -131479,8 +131553,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL BETA", direction="l", linebreak="al", - mathclass="default", - mathname="beta", specials={ "font", 0x03B2 }, unicodeslot=0x1D6FD, }, @@ -131489,8 +131561,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL GAMMA", direction="l", linebreak="al", - mathclass="default", - mathname="gamma", specials={ "font", 0x03B3 }, unicodeslot=0x1D6FE, }, @@ -131499,8 +131569,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL DELTA", 
direction="l", linebreak="al", - mathclass="default", - mathname="delta", specials={ "font", 0x03B4 }, unicodeslot=0x1D6FF, }, @@ -131509,8 +131577,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL EPSILON", direction="l", linebreak="al", - mathclass="default", - mathname="varepsilon", specials={ "font", 0x03B5 }, unicodeslot=0x1D700, }, @@ -131519,8 +131585,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL ZETA", direction="l", linebreak="al", - mathclass="default", - mathname="zeta", specials={ "font", 0x03B6 }, unicodeslot=0x1D701, }, @@ -131529,8 +131593,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL ETA", direction="l", linebreak="al", - mathclass="default", - mathname="eta", specials={ "font", 0x03B7 }, unicodeslot=0x1D702, }, @@ -131539,8 +131601,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL THETA", direction="l", linebreak="al", - mathclass="default", - mathname="theta", specials={ "font", 0x03B8 }, unicodeslot=0x1D703, }, @@ -131549,8 +131609,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL IOTA", direction="l", linebreak="al", - mathclass="default", - mathname="iota", specials={ "font", 0x03B9 }, unicodeslot=0x1D704, }, @@ -131559,8 +131617,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL KAPPA", direction="l", linebreak="al", - mathclass="default", - mathname="kappa", specials={ "font", 0x03BA }, unicodeslot=0x1D705, }, @@ -131569,8 +131625,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL LAMDA", direction="l", linebreak="al", - mathclass="default", - mathname="lambda", specials={ "font", 0x03BB }, unicodeslot=0x1D706, }, @@ -131579,8 +131633,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL MU", direction="l", linebreak="al", - mathclass="default", - mathname="mu", specials={ "font", 0x03BC }, unicodeslot=0x1D707, }, @@ -131589,8 +131641,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL NU", direction="l", linebreak="al", - mathclass="default", - mathname="nu", specials={ "font", 0x03BD }, unicodeslot=0x1D708, }, @@ -131599,8 +131649,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL XI", direction="l", linebreak="al", - mathclass="default", - mathname="xi", specials={ "font", 0x03BE }, unicodeslot=0x1D709, }, @@ -131609,8 +131657,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL OMICRON", direction="l", linebreak="al", - mathclass="default", - mathname="omicron", specials={ "font", 0x03BF }, unicodeslot=0x1D70A, }, @@ -131619,8 +131665,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL PI", direction="l", linebreak="al", - mathclass="default", - mathname="pi", specials={ "font", 0x03C0 }, unicodeslot=0x1D70B, }, @@ -131629,8 +131673,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL RHO", direction="l", linebreak="al", - mathclass="default", - mathname="rho", specials={ "font", 0x03C1 }, unicodeslot=0x1D70C, }, @@ -131639,8 +131681,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL FINAL SIGMA", direction="l", linebreak="al", - mathclass="default", - mathname="varsigma", specials={ "font", 0x03C2 }, unicodeslot=0x1D70D, }, @@ -131649,8 +131689,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL SIGMA", direction="l", linebreak="al", - mathclass="default", - mathname="sigma", specials={ "font", 0x03C3 }, unicodeslot=0x1D70E, }, @@ -131659,8 +131697,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL TAU", direction="l", linebreak="al", - mathclass="default", - mathname="tau", specials={ 
"font", 0x03C4 }, unicodeslot=0x1D70F, }, @@ -131669,8 +131705,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL UPSILON", direction="l", linebreak="al", - mathclass="default", - mathname="upsilon", specials={ "font", 0x03C5 }, unicodeslot=0x1D710, }, @@ -131679,8 +131713,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL PHI", direction="l", linebreak="al", - mathclass="default", - mathname="varphi", specials={ "font", 0x03C6 }, unicodeslot=0x1D711, }, @@ -131689,8 +131721,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL CHI", direction="l", linebreak="al", - mathclass="default", - mathname="chi", specials={ "font", 0x03C7 }, unicodeslot=0x1D712, }, @@ -131699,8 +131729,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL PSI", direction="l", linebreak="al", - mathclass="default", - mathname="psi", specials={ "font", 0x03C8 }, unicodeslot=0x1D713, }, @@ -131709,8 +131737,6 @@ characters.data={ description="MATHEMATICAL ITALIC SMALL OMEGA", direction="l", linebreak="al", - mathclass="default", - mathname="omega", specials={ "font", 0x03C9 }, unicodeslot=0x1D714, }, @@ -131727,8 +131753,6 @@ characters.data={ description="MATHEMATICAL ITALIC EPSILON SYMBOL", direction="l", linebreak="al", - mathclass="default", - mathname="epsilon", specials={ "font", 0x03F5 }, unicodeslot=0x1D716, }, @@ -131757,8 +131781,6 @@ characters.data={ description="MATHEMATICAL ITALIC PHI SYMBOL", direction="l", linebreak="al", - mathclass="default", - mathname="phi", specials={ "font", 0x03D5 }, unicodeslot=0x1D719, }, @@ -131767,7 +131789,7 @@ characters.data={ description="MATHEMATICAL ITALIC RHO SYMBOL", direction="l", linebreak="al", - mathclass="default", + mathclass="variable", mathname="varrho", specials={ "font", 0x03F1 }, unicodeslot=0x1D71A, @@ -131777,8 +131799,6 @@ characters.data={ description="MATHEMATICAL ITALIC PI SYMBOL", direction="l", linebreak="al", - mathclass="default", - mathname="varpi", specials={ "font", 0x03D6 }, unicodeslot=0x1D71B, }, diff --git a/Master/texmf-dist/tex/context/base/char-enc.lua b/Master/texmf-dist/tex/context/base/char-enc.lua index a4e5ac77d58..45f404ee985 100644 --- a/Master/texmf-dist/tex/context/base/char-enc.lua +++ b/Master/texmf-dist/tex/context/base/char-enc.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['char-syn'] = { version = 1.001, - comment = "companion to char-ini.tex", + comment = "companion to char-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" diff --git a/Master/texmf-dist/tex/context/base/char-ini.lua b/Master/texmf-dist/tex/context/base/char-ini.lua index ff42d91eeaa..5c4a40bad51 100644 --- a/Master/texmf-dist/tex/context/base/char-ini.lua +++ b/Master/texmf-dist/tex/context/base/char-ini.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['char-ini'] = { version = 1.001, - comment = "companion to char-ini.tex", + comment = "companion to char-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -11,7 +11,7 @@ xml = xml or { } local utf = unicode.utf8 -local utfchar, utfbyte = utf.char, utf.byte +local utfchar, utfbyte, utfvalues = utf.char, utf.byte, string.utfvalues local concat = table.concat local next, tonumber = next, tonumber local texsprint, texprint = tex.sprint, tex.print @@ -422,6 +422,7 @@ function characters.define(tobelettered, tobeactivated) -- 
catcodetables end end elseif is_command[category] then + -- this might change: contextcommand ipv contextname -- texprint(format("{\\catcode %s=13\\unexpanded\\gdef %s{\\%s}}",u,utfchar(u),contextname)) texsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}") -- no texprint activated[#activated+1] = "\\c"..u.."\\a" @@ -607,28 +608,91 @@ function characters.n_is_of_category(n,category) -- by name (string) return cd and cd.category == category end --- xml support - -characters.active_offset = 0x10000 - -xml.entities = xml.entities or { } - -storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml +-- xml support (moved) function characters.remapentity(chr,slot) texsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr)) end -function characters.setmkiventities() - local entities = xml.entities - entities.lt = "<" - entities.amp = "&" - entities.gt = ">" +characters.active_offset = 0x10000 -- there will be remapped in that byte range + +-- xml.entities = xml.entities or { } +-- +-- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml +-- +-- function characters.setmkiventities() +-- local entities = xml.entities +-- entities.lt = "<" +-- entities.amp = "&" +-- entities.gt = ">" +-- end +-- +-- function characters.setmkiientities() +-- local entities = xml.entities +-- entities.lt = utfchar(characters.active_offset + utfbyte("<")) +-- entities.amp = utfchar(characters.active_offset + utfbyte("&")) +-- entities.gt = utfchar(characters.active_offset + utfbyte(">")) +-- end + +-- some day we will make a table + +function characters.lower(str) + local new = { } + for u in utfvalues(str) do + new[#new+1] = utfchar(data[u].lccode or u) + end + return concat(new) end -function characters.setmkiientities() - local entities = xml.entities - entities.lt = utfchar(characters.active_offset + utfbyte("<")) - entities.amp = utfchar(characters.active_offset + utfbyte("&")) - entities.gt = utfchar(characters.active_offset + utfbyte(">")) +function characters.upper(str) + local new = { } + for u in utfvalues(str) do + new[#new+1] = utfchar(data[u].uccode or u) + end + return concat(new) end + +-- -- some day we might go this route, but it does not really save that much +-- -- so not now (we can generate a lot using mtx-unicode that operates on the +-- -- database) +-- +-- -- category cjkwd direction linebreak +-- +-- -- adobename comment contextcommand contextname description fallback lccode +-- -- mathclass mathfiller mathname mathspec mathstretch mathsymbol mirror +-- -- range shcode specials uccode uccodes unicodeslot +-- +-- local data = { +-- ['one']={ +-- common = { +-- category="cc", +-- direction="bn", +-- linebreak="cm", +-- }, +-- vector = { +-- [0x0000] = { +-- description="NULL", +-- group='one', +-- unicodeslot=0x0000, +-- }, +-- { +-- description="START OF HEADING", +-- group='one', +-- unicodeslot=0x0001, +-- }, +-- } +-- } +-- } +-- +-- local chardata, groupdata = { }, { } +-- +-- for group, gdata in next, data do +-- local common, vector = { __index = gdata.common }, gdata.vector +-- for character, cdata in next, vector do +-- chardata[character] = cdata +-- setmetatable(cdata,common) +-- end +-- groupdata[group] = gdata +-- end + +--~ characters.data, characters.groups = chardata, groupdata diff --git a/Master/texmf-dist/tex/context/base/char-ini.mkiv b/Master/texmf-dist/tex/context/base/char-ini.mkiv index daa20e72867..0d5e16bb04b 100644 --- 
a/Master/texmf-dist/tex/context/base/char-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/char-ini.mkiv @@ -17,6 +17,7 @@ \registerctxluafile{char-ini}{1.001} \registerctxluafile{char-cmp}{1.001} % maybe we will load this someplace else \registerctxluafile{char-map}{1.001} % maybe we will load this someplace else +\registerctxluafile{char-tex}{1.001} \unprotect @@ -69,6 +70,7 @@ \number\xmlcatcodesr, } ) + catcodes.register("xmlcatcodes",\number\xmlcatcodes) } \protect \endinput diff --git a/Master/texmf-dist/tex/context/base/char-map.lua b/Master/texmf-dist/tex/context/base/char-map.lua index 0d8422bc2d3..3f8cc3b3d3c 100644 --- a/Master/texmf-dist/tex/context/base/char-map.lua +++ b/Master/texmf-dist/tex/context/base/char-map.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['char-map'] = { version = 1.001, - comment = "companion to char-ini.tex", + comment = "companion to char-ini.mkiv", author = "Hans Hagen & Arthur Reutenauer", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" diff --git a/Master/texmf-dist/tex/context/base/char-tex.lua b/Master/texmf-dist/tex/context/base/char-tex.lua new file mode 100644 index 00000000000..ed9a244d781 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/char-tex.lua @@ -0,0 +1,89 @@ +if not modules then modules = { } end modules ['char-tex'] = { + version = 1.001, + comment = "companion to char-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +characters = characters or { } +characters.tex = characters.tex or { } + +local find = string.find + +local accent_map = { + ['~'] = "̃" , -- ̃ Ẽ + ['"'] = "̈" , -- ̈ Ë + ["`"] = "̀" , -- ̀ È + ["'"] = "́" , -- ́ É + ["^"] = "̂" , -- ̂ Ê + -- ̄ Ē + -- ̆ Ĕ + -- ̇ Ė + -- ̉ Ẻ + -- ̌ Ě + -- ̏ Ȅ + -- ̑ Ȇ + -- ̣ Ẹ + -- ̧ Ȩ + -- ̨ Ę + -- ̭ Ḙ + -- ̰ Ḛ +} + +local accents = table.concat(table.keys(accent_map)) + +local function remap_accents(a,c,braced) + local m = accent_map[a] + if m then + return c .. m + elseif braced then + return "\\" .. a .. "{" .. c .. "}" + else + return "\\" .. a .. c + end +end + +local command_map = { + ["i"] = "ı" +} + +local function remap_commands(c) + local m = command_map[c] + if m then + return m + else + return "\\" .. 
c + end +end + +local P, C, R, S, Cs, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc +local U, lpegmatch = lpeg.patterns.utf8, lpeg.match + +local accents = (P('\\') * C(S(accents)) * (P("{") * C(U) * P("}" * Cc(true)) + C(U) * Cc(false))) / remap_accents +local commands = (P('\\') * C(R("az","AZ")^1)) / remap_commands + +local convert_accents = Cs((accents + P(1))^0) +local convert_commands = Cs((commands + P(1))^0) + +local no_l = P("{") / "" +local no_r = P("}") / "" + +local convert_accents_strip = Cs((no_l * accents * no_r + accents + P(1))^0) +local convert_commands_strip = Cs((no_l * commands * no_r + commands + P(1))^0) + +function characters.tex.toutf(str,strip) + if find(str,"\\") then -- we can start at teh found position + if strip then + str = lpegmatch(convert_commands_strip,str) + str = lpegmatch(convert_accents_strip,str) + else + str = lpegmatch(convert_commands,str) + str = lpegmatch(convert_accents,str) + end + end + return str +end + +--~ print(characters.tex.toutf([[\"{e}]]),true) +--~ print(characters.tex.toutf([[{\"{e}}]],true)) diff --git a/Master/texmf-dist/tex/context/base/char-utf.lua b/Master/texmf-dist/tex/context/base/char-utf.lua index 7dd5d914f79..6dd85fdc87d 100644 --- a/Master/texmf-dist/tex/context/base/char-utf.lua +++ b/Master/texmf-dist/tex/context/base/char-utf.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['char-utf'] = { version = 1.001, - comment = "companion to char-utf.tex", + comment = "companion to char-utf.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" diff --git a/Master/texmf-dist/tex/context/base/char-utf.mkiv b/Master/texmf-dist/tex/context/base/char-utf.mkiv index d21cd842cc2..16b4029d8cb 100644 --- a/Master/texmf-dist/tex/context/base/char-utf.mkiv +++ b/Master/texmf-dist/tex/context/base/char-utf.mkiv @@ -41,7 +41,7 @@ %D \definecomposedutf 318 108 126 % lcaron %D \stoptyping -\def\definecomposedutf#1 #2 #3 % +\unexpanded\def\definecomposedutf#1 #2 #3 % {\ctxlua{characters.filters.utf.add_grapheme("#1","#2","#3")}} \protect diff --git a/Master/texmf-dist/tex/context/base/chem-ini.lua b/Master/texmf-dist/tex/context/base/chem-ini.lua index 27b73484008..90874909207 100644 --- a/Master/texmf-dist/tex/context/base/chem-ini.lua +++ b/Master/texmf-dist/tex/context/base/chem-ini.lua @@ -1,12 +1,13 @@ if not modules then modules = { } end modules ['chem-ini'] = { version = 1.001, - comment = "companion to chem-ini.tex", + comment = "companion to chem-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } local format, texsprint = string.format, tex.sprint +local lpegmatch = lpeg.match local trace_molecules = false trackers.register("chemistry.molecules", function(v) trace_molecules = v end) @@ -60,15 +61,15 @@ local parser = lpeg.Cs((csname + lowhigh + highlow + low + high + sign + any chemicals.moleculeparser = parser -- can be used to avoid functioncall function chemicals.molecule(str) - return parser:match(str) + return lpegmatch(parser,str) end function commands.molecule(str) if trace_molecules then - local rep = parser:match(str) + local rep = lpegmatch(parser,str) logs.report("chemistry", "molecule %s => %s",str,rep) texsprint(ctxcatcodes,rep) else - texsprint(ctxcatcodes,parser:match(str)) + texsprint(ctxcatcodes,lpegmatch(parser,str)) end end diff --git a/Master/texmf-dist/tex/context/base/chem-ini.mkiv 
b/Master/texmf-dist/tex/context/base/chem-ini.mkiv index da240dbace5..5184fe1a7a0 100644 --- a/Master/texmf-dist/tex/context/base/chem-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/chem-ini.mkiv @@ -17,7 +17,7 @@ \unprotect %D \macros -%D {\molecule} +%D {molecule} %D %D Quick and dirty: %D diff --git a/Master/texmf-dist/tex/context/base/chem-str.lua b/Master/texmf-dist/tex/context/base/chem-str.lua index 8ab48fca2af..ad4cc6c1be4 100644 --- a/Master/texmf-dist/tex/context/base/chem-str.lua +++ b/Master/texmf-dist/tex/context/base/chem-str.lua @@ -1,6 +1,6 @@ if not modules then modules = { } end modules ['chem-str'] = { version = 1.001, - comment = "companion to chem-str.tex", + comment = "companion to chem-str.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" @@ -17,6 +17,7 @@ local format, gmatch, match, lower, gsub = string.format, string.gmatch, string. local concat, insert, remove = table.concat, table.insert, table.remove local apply = structure.processors.apply local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes +local lpegmatch = lpeg.match local variables = interfaces.variables @@ -59,7 +60,7 @@ local one_keys = { sb = "line", db = "line", tb = "line", ep = "line", es = "line", ed = "line", et = "line", sd = "line", ldd = "line", rdd = "line", - hb = "line", bb = "line", oe = "line", + hb = "line", bb = "line", oe = "line", bd = "line", bw = "line", z = "text", cz = "text", zt = "text", zn = "number", zbt = "text", zbn = "number", ztt = "text", ztn = "number", mov = "transform", sub = "transform", dir = "transform", off = "transform", @@ -141,10 +142,11 @@ local syntax = { local definitions = { } function chemicals.undefine(name) - definitions[name] = nil + definitions[lower(name)] = nil end function chemicals.define(name,spec,text) + name = lower(name) local dn = definitions[name] if not dn then dn = { } definitions[name] = dn end dn[#dn+1] = { @@ -154,7 +156,7 @@ function chemicals.define(name,spec,text) end local metacode, kind, keys, bonds, max, txt, textsize, rot, pstack -local molecule = chemicals.molecule -- or use chemicals.moleculeparser:match(...) +local molecule = chemicals.molecule -- or use lpegmatch(chemicals.moleculeparser,...) 
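A minimal sketch (not part of the patch) of the change the chem-ini.lua and chem-str.lua hunks above apply throughout: parser:match(str) is rewritten as lpegmatch(parser,str), where lpegmatch is a local alias for lpeg.match. Both forms return the same captures; the local alias merely saves a table lookup per call in hot code. The pattern below is a made-up stand-in, not the actual molecule parser.

-- toy substitution: lower the digits in a formula-like string
local lpeg      = lpeg or require("lpeg") -- built into LuaTeX, a rock elsewhere
local lpegmatch = lpeg.match
local P, R, Cs  = lpeg.P, lpeg.R, lpeg.Cs

local digits    = R("09")^1 / function(n) return "_{" .. n .. "}" end
local toyparser = Cs((digits + P(1))^0)

print(toyparser:match("H2O"))        -- H_{2}O  (method call, per-call lookup)
print(lpegmatch(toyparser,"H2O"))    -- H_{2}O  (same result via the local alias)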
local function fetch(txt) local st = stack[txt] @@ -197,12 +199,12 @@ local pattern = lpeg.Cc(false) * lpeg.Cc(false) * lpeg.Cc(false) * text ) ---~ local n, operation, index, upto, set, text = pattern:match("RZ1357") +--~ local n, operation, index, upto, set, text = lpegmatch(pattern,"RZ1357") ---~ print(pattern:match("RZ=x")) 1 RZ false false false x ---~ print(pattern:match("RZ1=x")) 1 RZ 1 false false x ---~ print(pattern:match("RZ1..3=x")) 1 RZ 1 3 false x ---~ print(pattern:match("RZ13=x")) 1 RZ false false table x +--~ print(lpegmatch(pattern,"RZ=x")) 1 RZ false false false x +--~ print(lpegmatch(pattern,"RZ1=x")) 1 RZ 1 false false x +--~ print(lpegmatch(pattern,"RZ1..3=x")) 1 RZ 1 3 false x +--~ print(lpegmatch(pattern,"RZ13=x")) 1 RZ false false table x local function process(spec,text,n,rulethickness,rulecolor,offset) insert(stack,{ spec=spec, text=text, n=n }) @@ -216,7 +218,7 @@ local function process(spec,text,n,rulethickness,rulecolor,offset) process(di.spec,di.text,1,rulethickness,rulecolor) end else - local rep, operation, special, index, upto, set, text = pattern:match(s) + local rep, operation, special, index, upto, set, text = lpegmatch(pattern,s) if operation == "pb" then insert(pstack,kind) metacode[#metacode+1] = syntax.pb.direct @@ -441,7 +443,7 @@ function chemicals.stop() if trace_structure then logs.report("chemical", "metapost code:\n%s", mpcode) end - metapost.graphic(chemicals.instance,chemicals.format,mpcode,"") + metapost.graphic(chemicals.instance,chemicals.format,mpcode) metacode = nil end diff --git a/Master/texmf-dist/tex/context/base/chem-str.mkiv b/Master/texmf-dist/tex/context/base/chem-str.mkiv index 29c6fe939bd..1e17218c875 100644 --- a/Master/texmf-dist/tex/context/base/chem-str.mkiv +++ b/Master/texmf-dist/tex/context/base/chem-str.mkiv @@ -49,7 +49,7 @@ \unprotect -\def\setupchemical +\unexpanded\def\setupchemical {\dosingleempty\dosetupchemical} \def\dosetupchemical @@ -57,7 +57,7 @@ \let\setupchemicals\setupchemical -\def\setupchemicalframed +\unexpanded\def\setupchemicalframed {\dosingleempty\dosetupchemicalframed} \def\dosetupchemicalframed @@ -65,7 +65,7 @@ \def\chemicalparameter#1{\csname\??cm#1\endcsname} -\def\definechemical +\unexpanded\def\definechemical {\dosingleargument\dodefinechemical} % global \def\dodefinechemical[#1]#2% @@ -76,7 +76,7 @@ #2% flush \stopnointerference} -\def\definechemicalsymbol +\unexpanded\def\definechemicalsymbol {\dodoubleempty\dodefinechemicalsymbol} \def\dodefinechemicalsymbol[#1][#2]% @@ -100,6 +100,9 @@ \def\dochemicaltext#1% in ppchtex we had a more clever alignment {\dosetchemicaltext\strut#1} % maybe also \setstrut +\def\dochemicaltext#1% + {\mathematics{\dosetchemicaltext\strut\ifcase\currentxfontsize\or\scriptstyle\or\scriptscriptstyle\fi#1}} + \newconditional\indisplaychemical \unexpanded\def\startchemical @@ -437,7 +440,7 @@ \newif\ifinchemicalformula -\def\startchemicalformula +\unexpanded\def\startchemicalformula {\mathortext\vcenter\vbox\bgroup \forgetall \inchemicalformulatrue @@ -450,7 +453,7 @@ \setfalse\formulachemicalhastop \setfalse\formulachemicalhasbot } -\def\stopchemicalformula +\unexpanded\def\stopchemicalformula {\tabskip1em\relax \nointerlineskip \ifconditional\formulachemicalhastop diff --git a/Master/texmf-dist/tex/context/base/colo-ext.mkii b/Master/texmf-dist/tex/context/base/colo-ext.mkii index 06facd34ee2..473c010e028 100644 --- a/Master/texmf-dist/tex/context/base/colo-ext.mkii +++ b/Master/texmf-dist/tex/context/base/colo-ext.mkii @@ -42,12 +42,14 @@ %D handling. 
Here we deal with a per-document setting. \setupcolors - [\c!overprint=\v!no] + [\c!intent=\v!none] \def\starttextoverprint - {\doifelse\@@cloverprint\v!yes - {\let\stoptextoverprint\dostopoverprint\dostartoverprint} - {\let\stoptextoverprint\donothing}} + {\doifelse\@@clintent\v!overprint + {\glet\stoptextoverprint \dostopoverprint + \glet\starttextoverprint\dostartoverprint + \dostartoverprint} + {\glet\stoptextoverprint\donothing}} \let\stoptextoverprint\donothing diff --git a/Master/texmf-dist/tex/context/base/colo-ext.mkiv b/Master/texmf-dist/tex/context/base/colo-ext.mkiv index 1c1f0d1afff..b17608f59c1 100644 --- a/Master/texmf-dist/tex/context/base/colo-ext.mkiv +++ b/Master/texmf-dist/tex/context/base/colo-ext.mkiv @@ -41,17 +41,63 @@ %D or independent. For the moment we only support independent overprint %D handling. Here we deal with a per-document setting. +\unexpanded\def\startcolorintent[#1]% + {\pushattribute\colorintentattribute + \dotriggercolorintent{#1}} + +\unexpanded\def\stopcolorintent + {\popattribute\colorintentattribute} + +\unexpanded\def\startoverprint{\startcolorintent[\v!overprint]} +\unexpanded\def\stopoverprint {\stopcolorintent} + +\unexpanded\def\startknockout {\startcolorintent[\v!knockout ]} +\unexpanded\def\stopknockout {\stopcolorintent} + +\let\starttextcolorintent\relax +\let\stoptextcolorintent \relax + \setupcolors [\c!overprint=\v!no] -\def\starttextoverprint - {\doifelse\@@cloverprint\v!yes - {\let\stoptextoverprint\stopoverprintproperty\startoverprintproperty} - {\let\stoptextoverprint\donothing}} +\appendtoks + \dosettextcolorintent +\to \everysetupcolors + +\def\dosettextcolorintent + {\doifnot\@@clintent\v!none + {\xdef\starttextcolorintent{\noexpand\dotriggercolorintent{\@@clintent}}% + \glet\dosettextcolorintent\relax + \dotriggercolorintent\@@clintent}} + +\appendtoks \starttextcolorintent \to \everystarttextproperties +\appendtoks \stoptextcolorintent \to \everystoptextproperties + +\setupcolors[\c!intent=\v!none] + +% A goodie that replaces the startMPcolor hackery +% +% \definecolor[red-t] [r=1,t=0.5,a=1] +% \definecolor[green-t][g=1,t=0.5,a=1] +% \defineintermediatecolor[mycolora][0.5,red,green] +% \defineintermediatecolor[mycolorb][0.5,red-t,green-t] +% \starttext +% test {\mycolora OEPS} test +% test {\mycolorb OEPS} test +% \stoptext + +\unexpanded\def\defineintermediatecolor + {\dotripleempty\dodefineintermediatecolor} -\let\stoptextoverprint\donothing +\def\dodefineintermediatecolor[#1][#2][#3]% \dotripleempty adds {} inside [] + {\dododefineintermediatecolor[#1][#2][#3]} -\appendtoks \starttextoverprint \to \everystarttextproperties -\appendtoks \stoptextoverprint \to \everystoptextproperties +\def\dododefineintermediatecolor[#1][#2,#3,#4][#5]% + {\ifconditional\collectcolorsinlist\collectcolorinlist{#1}\fi + \ctxlua{colors.defineintermediatecolor("#1","#2", + \thecolorattribute{#3},\thecolorattribute{#4}, + \thetransparencyattribute{#3},\thetransparencyattribute{#4}, + "#5",false,\iffreezecolors true\else false\fi)}% not global + \dodefinecolorcommand\setvalue{#1}} \protect \endinput diff --git a/Master/texmf-dist/tex/context/base/colo-hex.mkii b/Master/texmf-dist/tex/context/base/colo-hex.mkii index 7cef6e8a2ad..db67f184102 100644 --- a/Master/texmf-dist/tex/context/base/colo-hex.mkii +++ b/Master/texmf-dist/tex/context/base/colo-hex.mkii @@ -11,8 +11,8 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
-\ifx\dodododefinecolor\undefined \else - \endinput +\ifdefined \hexcolorprefix + \expandafter \endinput \fi \writestatus{loading}{ConTeXt Color Macros / Hexadecimal} diff --git a/Master/texmf-dist/tex/context/base/colo-hex.mkiv b/Master/texmf-dist/tex/context/base/colo-hex.mkiv index dd8e039389b..f661b2445f1 100644 --- a/Master/texmf-dist/tex/context/base/colo-hex.mkiv +++ b/Master/texmf-dist/tex/context/base/colo-hex.mkiv @@ -23,4 +23,6 @@ % fill (point 0 of p -- point 3 of p -- point 2 of p --cycle) withcolor \MPcolor{hextestcolor} ; % top left part % \stopMPpage +\def\checkhexcolor[#1]{\doifcolorelse{#1}\donothing{\definecolor[#1][x=#1]}} % is this ok? + \endinput diff --git a/Master/texmf-dist/tex/context/base/colo-ini.lua b/Master/texmf-dist/tex/context/base/colo-ini.lua index a67df6a778e..342d6110dca 100644 --- a/Master/texmf-dist/tex/context/base/colo-ini.lua +++ b/Master/texmf-dist/tex/context/base/colo-ini.lua @@ -1,16 +1,19 @@ if not modules then modules = { } end modules ['colo-ini'] = { version = 1.000, - comment = "companion to colo-ini.tex", + comment = "companion to colo-ini.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } +local type = type local concat = table.concat -local format, gmatch, gsub, lower, match = string.format, string.gmatch, string.gsub, string.lower, string.match +local format, gmatch, gsub, lower, match, find = string.format, string.gmatch, string.gsub, string.lower, string.match, string.find local texsprint = tex.sprint local ctxcatcodes = tex.ctxcatcodes +local trace_define = false trackers.register("colors.define",function(v) trace_define = v end) + local settings_to_hash_strict = aux.settings_to_hash_strict colors = colors or { } @@ -23,89 +26,93 @@ local a_transparency = attributes.private('transparency') local a_colorspace = attributes.private('colormodel') local a_background = attributes.private('background') -local a_l_c_template = "\\setevalue{(ca:%s)}{%s}" .. - "\\setevalue{(cs:%s)}{\\dosetattribute{color}{%s}}" -local a_g_c_template = "\\setxvalue{(ca:%s)}{%s}" .. - "\\setxvalue{(cs:%s)}{\\dosetattribute{color}{%s}}" -local f_l_c_template = "\\setvalue {(ca:%s)}{\\doinheritca{%s}}" .. - "\\setvalue {(cs:%s)}{\\doinheritcs{%s}}" -local f_g_c_template = "\\setgvalue{(ca:%s)}{\\doinheritca{%s}}" .. - "\\setgvalue{(cs:%s)}{\\doinheritcs{%s}}" -local r_l_c_template = "\\localundefine{(ca:%s)}" .. - "\\localundefine{(cs:%s)}" -local r_g_c_template = "\\globalundefine{(ca:%s)}" .. - "\\globalundefine{(cs:%s)}" - -local a_l_t_template = "\\setevalue{(ta:%s)}{%s}" .. - "\\setevalue{(ts:%s)}{\\dosetattribute{transparency}{%s}}" -local a_g_t_template = "\\setxvalue{(ta:%s)}{%s}" .. - "\\setxvalue{(ts:%s)}{\\dosetattribute{transparency}{%s}}" -local f_l_t_template = "\\setvalue {(ta:%s)}{\\doinheritta{%s}}" .. - "\\setvalue {(ts:%s)}{\\doinheritts{%s}}" -local f_g_t_template = "\\setgvalue{(ta:%s)}{\\doinheritta{%s}}" .. - "\\setgvalue{(ts:%s)}{\\doinheritts{%s}}" -local r_l_t_template = "\\localundefine{(ta:%s)}" .. - "\\localundefine{(ts:%s)}" -local r_g_t_template = "\\globalundefine{(ta:%s)}" .. 
- "\\globalundefine{(ts:%s)}" +local register_color = colors.register +local attributes_list = attributes.list local function definecolor(name, ca, global) if ca and ca > 0 then if global then - texsprint(ctxcatcodes,format(a_g_c_template, name, ca, name, ca)) + if trace_define then + commands.writestatus("color","define global color '%s' with attribute: %s",name,ca) + end + context.colordefagc(name,ca) else - texsprint(ctxcatcodes,format(a_l_c_template, name, ca, name, ca)) + if trace_define then + commands.writestatus("color","define local color '%s' with attribute: %s",name,ca) + end + context.colordefalc(name,ca) end else if global then - texsprint(ctxcatcodes,format(r_g_c_template, name, name)) + context.colordefrgc(name) else - texsprint(ctxcatcodes,format(r_l_c_template, name, name)) + context.colordefrlc(name) end end end + local function inheritcolor(name, ca, global) if ca and ca ~= "" then if global then - texsprint(ctxcatcodes,format(f_g_c_template, name, ca, name, ca)) + if trace_define then + commands.writestatus("color","inherit global color '%s' with attribute: %s",name,ca) + end + context.colordeffgc(name,ca) else - texsprint(ctxcatcodes,format(f_l_c_template, name, ca, name, ca)) + if trace_define then + commands.writestatus("color","inherit local color '%s' with attribute: %s",name,ca) + end + context.colordefflc(name,ca) end else if global then - texsprint(ctxcatcodes,format(r_g_c_template, name, name)) + context.colordefrgc(name) else - texsprint(ctxcatcodes,format(r_l_c_template, name, name)) + context.colordefrlc(name) end end end + local function definetransparent(name, ta, global) if ta and ta > 0 then if global then - texsprint(ctxcatcodes,format(a_g_t_template, name, ta, name, ta)) + if trace_define then + commands.writestatus("color","define global transparency '%s' with attribute: %s",name,ta) + end + context.colordefagt(name,ta) else - texsprint(ctxcatcodes,format(a_l_t_template, name, ta, name, ta)) + if trace_define then + commands.writestatus("color","define local transparency '%s' with attribute: %s",name,ta) + end + context.colordefalt(name,ta) end else if global then - texsprint(ctxcatcodes,format(r_g_t_template, name, name)) + context.colordefrgt(name) else - texsprint(ctxcatcodes,format(r_l_t_template, name, name)) + context.colordefrlt(name) end end end + local function inherittransparent(name, ta, global) if ta and ta ~= "" then if global then - texsprint(ctxcatcodes,format(f_g_t_template, name, ta, name, ta)) + if trace_define then + commands.writestatus("color","inherit global transparency '%s' with attribute: %s",name,ta) + end + context.colordeffgt(name,ta) else - texsprint(ctxcatcodes,format(f_l_t_template, name, ta, name, ta)) + if trace_define then + commands.writestatus("color","inherit local transparency '%s' with attribute: %s",name,ta) + end + context.colordefflt(name,ta) end else if global then - texsprint(ctxcatcodes,format(r_g_t_template, name, name)) + context.colordefrgt(name) else - texsprint(ctxcatcodes,format(r_l_t_template, name, name)) + context.colordefrlt(name) end end end @@ -144,6 +151,9 @@ local function do_registerspotcolor(parent,name,parentnumber,e,f,d,p) if v then local kind = colors.default -- else problems with shading etc if kind == 1 then kind = v[1] end + if e and e ~= "" then + registrations.spotcolorname(parent,e) -- before registration of the color + end if kind == 2 then -- name noffractions names p's r g b registrations.grayspotcolor(parent,f,d,p,v[2]) elseif kind == 3 then @@ -151,9 +161,6 @@ local function 
do_registerspotcolor(parent,name,parentnumber,e,f,d,p) elseif kind == 4 then registrations.cmykspotcolor(parent,f,d,p,v[6],v[7],v[8],v[9]) end - if e and e ~= "" then - registrations.spotcolorname(parent,e) - end end registered[parentnumber] = true end @@ -178,45 +185,64 @@ local function do_registermultitonecolor(parent,name,parentnumber,e,f,d,p) -- sa end function colors.definesimplegray(name,s) - return colors.register(name,'gray',s) -- we still need to get rid of 'color' + return register_color(name,'gray',s) -- we still need to get rid of 'color' end function colors.defineprocesscolor(name,str,global,freeze) -- still inconsistent color vs transparent - local r = match(str,"^#(.+)$") -- for old times sake (if we need to feed from xml or so) - local t = (r and { h = r }) or settings_to_hash_strict(str) - if t then - if t.h then - local r, g, b = match(t.h .. "000000","(..)(..)(..)") -- watch the 255 - definecolor(name, colors.register(name,'rgb',(tonumber(r,16) or 0)/255,(tonumber(g,16) or 0)/255,(tonumber(b,16) or 0)/255 ), global) - elseif t.r or t.g or t.b then - definecolor(name, colors.register(name,'rgb', tonumber(t.r) or 0, tonumber(t.g) or 0, tonumber(t.b) or 0 ), global) - elseif t.c or t.m or t.y or t.k then - definecolor(name, colors.register(name,'cmyk',tonumber(t.c) or 0, tonumber(t.m) or 0, tonumber(t.y) or 0, tonumber(t.k) or 0), global) + local x = match(str,"^#(.+)$") -- for old times sake (if we need to feed from xml or so) + if x then + local r, g, b = match(x .. "000000","(..)(..)(..)") -- watch the 255 + definecolor(name, register_color(name,'rgb',(tonumber(r,16) or 0)/255,(tonumber(g,16) or 0)/255,(tonumber(b,16) or 0)/255), global) + else + local settings = settings_to_hash_strict(str) + if settings then + local r, g, b = settings.r, settings.g, settings.b + if r or g or b then + definecolor(name, register_color(name,'rgb', tonumber(r) or 0, tonumber(g) or 0, tonumber(b) or 0), global) + else + local c, m, y, k = settings.c, settings.m, settings.y, settings.k + if c or m or y or b then + definecolor(name, register_color(name,'cmyk',tonumber(c) or 0, tonumber(m) or 0, tonumber(y) or 0, tonumber(k) or 0), global) + else + local h, s, v = settings.h, settings.s, settings.v + if v then + r, g, b = colors.hsvtorgb(tonumber(h) or 0, tonumber(s) or 1, tonumber(v) or 1) -- maybe later native + definecolor(name, register_color(name,'rgb',r,g,b), global) + else + local x = settings.x or h + if x then + r, g, b = match(x .. 
"000000","(..)(..)(..)") -- watch the 255 + definecolor(name, register_color(name,'rgb',(tonumber(r,16) or 0)/255,(tonumber(g,16) or 0)/255,(tonumber(b,16) or 0)/255), global) + else + definecolor(name, register_color(name,'gray',tonumber(s) or 0), global) + end + end + end + end + local a, t = settings.a, settings.t + if a and t then + definetransparent(name, transparencies.register(name,transparent[a] or tonumber(a) or 1,tonumber(t) or 1), global) + elseif colors.couple then + -- definetransparent(name, transparencies.register(nil, 1, 1), global) -- can be sped up + definetransparent(name, 0, global) -- can be sped up + end + elseif freeze then + local ca = attributes_list[a_color] [str] + local ta = attributes_list[a_transparency][str] + if ca then + definecolor(name, ca, global) + end + if ta then + definetransparent(name, ta, global) + end else - definecolor(name, colors.register(name,'gray',tonumber(t.s) or 0), global) - end - if t.a and t.t then - definetransparent(name, transparencies.register(name,transparent[t.a] or tonumber(t.a) or 1,tonumber(t.t) or 1), global) - elseif colors.couple then - -- definetransparent(name, transparencies.register(nil, 1, 1), global) -- can be sped up - definetransparent(name, 0, global) -- can be sped up + inheritcolor(name, str, global) + inherittransparent(name, str, global) + -- if global and str ~= "" then -- For Peter Rolf who wants access to the numbers in Lua. (Currently only global is supported.) + -- attributes_list[a_color] [name] = attributes_list[a_color] [str] or attributes.unsetvalue -- reset + -- attributes_list[a_transparency][name] = attributes_list[a_transparency][str] or attributes.unsetvalue + -- end end - elseif freeze then - local ca = attributes.list[a_color] [str] - local ta = attributes.list[a_transparency][str] - if ca then - definecolor(name, ca, global) - end - if ta then - definetransparent(name, ta, global) - end - else - inheritcolor(name, str, global) - inherittransparent(name, str, global) - -- if global and str ~= "" then -- For Peter Rolf who wants access to the numbers in Lua. (Currently only global is supported.) 
- -- attributes.list[a_color] [name] = attributes.list[a_color] [str] or attributes.unsetvalue -- reset - -- attributes.list[a_transparency][name] = attributes.list[a_transparency][str] or attributes.unsetvalue - -- end end end @@ -226,19 +252,20 @@ function colors.isblack(ca) -- maybe commands end function colors.definespotcolor(name,parent,str,global) - if parent == "" or parent:find("=") then + if parent == "" or find(parent,"=") then colors.registerspotcolor(name, parent) elseif name ~= parent then - local cp = attributes.list[a_color][parent] + local cp = attributes_list[a_color][parent] if cp then local t = settings_to_hash_strict(str) if t then - t.p = tonumber(t.p) or 1 - do_registerspotcolor(parent, name, cp, t.e, 1, "", t.p) -- p not really needed, only diagnostics + local tp = tonumber(t.p) or 1 + do_registerspotcolor(parent, name, cp, t.e, 1, "", tp) -- p not really needed, only diagnostics if name and name ~= "" then - definecolor(name, colors.register(name,'spot', parent, 1, "", t.p), true) - if t.a and t.t then - definetransparent(name, transparencies.register(name,transparent[t.a] or tonumber(t.a) or 1,tonumber(t.t) or 1), global) + definecolor(name, register_color(name,'spot', parent, 1, "", tp), true) + local ta, tt = t.a, t.t + if ta and tt then + definetransparent(name, transparencies.register(name,transparent[ta] or tonumber(ta) or 1,tonumber(tt) or 1), global) elseif colors.couple then --~ definetransparent(name, transparencies.register(nil, 1, 1), global) -- can be sped up definetransparent(name, 0, global) -- can be sped up @@ -250,7 +277,7 @@ function colors.definespotcolor(name,parent,str,global) end function colors.registerspotcolor(parent, str) - local cp = attributes.list[a_color][parent] + local cp = attributes_list[a_color][parent] if cp then local e = "" if str then @@ -275,11 +302,11 @@ function colors.definemultitonecolor(name,multispec,colorspec,selfspec) dd, pp, nn = concat(dd,','), concat(pp,','), concat(nn,'_') local parent = gsub(lower(nn),"[^%d%a%.]+","_") colors.defineprocesscolor(parent,colorspec..","..selfspec,true,true) - local cp = attributes.list[a_color][parent] + local cp = attributes_list[a_color][parent] if cp then do_registerspotcolor(parent, name, cp, "", nof, dd, pp) do_registermultitonecolor(parent, name, cp, "", nof, dd, pp) - definecolor(name, colors.register(name, 'spot', parent, nof, dd, pp), true) + definecolor(name, register_color(name, 'spot', parent, nof, dd, pp), true) local t = settings_to_hash_strict(selfspec) if t and t.a and t.t then definetransparent(name, transparencies.register(name,transparent[t.a] or tonumber(t.a) or 1,tonumber(t.t) or 1), global) @@ -350,7 +377,7 @@ function colors.formatgray(ca,separator) return format("%0.3f",(cv and cv[2]) or 0) end -function colors.colorcomponents(ca) +function colors.colorcomponents(ca) -- return list local cv = colors.value(ca) if cv then local model = cv[1] @@ -405,3 +432,49 @@ function colors.spotcolorvalue(ca,default) end return tostring(v) end + +-- experiment (a bit of a hack, as we need to get the attribute number) + +local min = math.min + +-- a[b,c] -> b+a*(c-b) + +local function f(one,two,i,fraction) + local o, t = one[i], two[i] + local otf = o + fraction * (t - o) + if otf > 1 then + otf = 1 + end + return otf +end + +function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,specs,global,freeze) + fraction = tonumber(fraction) or 1 + local one, two = colors.value(c_one), colors.value(c_two) + if one and two then + local csone, cstwo = one[1], 
two[1] + if csone == cstwo then + -- actually we can set all 8 values at once here but this is cleaner as we avoid + -- problems with weighted gray conversions and work with original values + local ca + if csone == 2 then + ca = register_color(name,'gray',f(one,two,2,fraction)) + elseif csone == 3 then + ca = register_color(name,'rgb',f(one,two,3,fraction),f(one,two,4,fraction),f(one,two,5,fraction)) + elseif csone == 4 then + ca = register_color(name,'cmyk',f(one,two,6,fraction),f(one,two,7,fraction),f(one,two,8,fraction),f(one,two,9,fraction)) + else + ca = register_color(name,'gray',f(one,two,2,fraction)) + end + definecolor(name,ca,global,freeze) + end + end + local one, two = transparencies.value(a_one), transparencies.value(a_two) + local t = settings_to_hash_strict(specs) + local ta = tonumber((t and t.a) or (one and one[1]) or (two and two[1])) + local tt = tonumber((t and t.t) or (one and two and f(one,two,2,fraction))) + if ta and tt then +--~ print(ta,tt) + definetransparent(name,transparencies.register(name,ta,tt),global) + end +end diff --git a/Master/texmf-dist/tex/context/base/colo-ini.mkiv b/Master/texmf-dist/tex/context/base/colo-ini.mkiv index e7a84980578..1bceb5aa70a 100644 --- a/Master/texmf-dist/tex/context/base/colo-ini.mkiv +++ b/Master/texmf-dist/tex/context/base/colo-ini.mkiv @@ -59,6 +59,7 @@ \newif\ifSPOTsupported \newif\ifpreferGRAY \newif\ifGRAYprefered +\newif\ifconvertGRAY \convertGRAYtrue \newif\ifreduceCMYK \newif\ifconverttoGRAY \newif\ifweightGRAY \weightGRAYtrue @@ -109,11 +110,11 @@ %D %D \getbuffer \typebuffer -\def\definecolor {\dodoubleargument\dodefinecolor} -\def\defineglobalcolor {\dodoubleargument\dodefineglobalcolor} -\def\definenamedcolor {\dodoubleargument\dodefinenamedcolor} -\def\definespotcolor {\dotripleargument\dodefinespotcolor} -\def\definemultitonecolor{\doquadrupleempty\dodefinemultitonecolor} +\unexpanded\def\definecolor {\dodoubleargument\dodefinecolor} +\unexpanded\def\defineglobalcolor {\dodoubleargument\dodefineglobalcolor} +\unexpanded\def\definenamedcolor {\dodoubleargument\dodefinenamedcolor} +\unexpanded\def\definespotcolor {\dotripleargument\dodefinespotcolor} +\unexpanded\def\definemultitonecolor{\doquadrupleempty\dodefinemultitonecolor} %D \macros %D {startcolor,stopcolor, @@ -143,15 +144,15 @@ \unexpanded\def\color [#1]{\groupedcommand{\doactivatecolor{#1}}{}} \unexpanded\def\startcolor [#1]{\begingroup\doactivatecolor{#1}} \unexpanded\def\stopcolor {\endgroup} -\unexpanded\def\graycolor [#1]{\groupedcommand{\setcolormodel{gray}\getvalue{#1}}{}} +\unexpanded\def\graycolor [#1]{\groupedcommand{\dosetcolormodel{gray}\getvalue{#1}}{}} \unexpanded\def\colored [#1]{\groupedcommand{\definecolor[@colored@][#1]\doactivatecolor{@colored@}}{}} \unexpanded\def\fastcolored [#1]#2{\begingroup\dodefinefastcolor[@colored@][#1]\doactivatecolor{@colored@}#2\endgroup} \def\predefinecolor [#1]{\flushatshipout{\hbox{\color[#1]{}}}} \def\predefineindexcolor[#1]{\flushatshipout{\hbox{\color[#1]{}}}} - \def\startcolorpage {\startcolor[\ifx\maintextcolor\empty\defaulttextcolor\else\maintextcolor\fi]} - \def\stopcolorpage {\stopcolor} - \def\startraster [#1]{\dosetrastercolor{#1}} - \def\stopraster {} + \unexpanded\def\startcolorpage {\startcolor[\ifx\maintextcolor\empty\defaulttextcolor\else\maintextcolor\fi]} + \unexpanded\def\stopcolorpage {\stopcolor} + \unexpanded\def\startraster [#1]{\dosetrastercolor{#1}} + \unexpanded\def\stopraster {} \def\raster [#1]{\groupedcommand{\dosetrastercolor{#1}}{}} \def\faststartcolor 
[#1]{\doactivatecolor{#1}} \def\faststopcolor {} @@ -159,11 +160,30 @@ \let\grey\graycolor +%D Stacking: + +% \colormodelattribute \colorattribute \transparencyattribute + +\newcount\currentcolornesting + +\unexpanded\def\pushcolor[#1]% + {\global\advance\currentcolornesting\plusone + \expandafter\edef\csname\??cl:s:\number\currentcolornesting\endcsname + {\attribute\colormodelattribute \the\attribute\colormodelattribute + \attribute\colorattribute \the\attribute\colorattribute + \attribute\transparencyattribute\the\attribute\transparencyattribute + \space}% stack + \doactivatecolor{#1}} + +\unexpanded\def\popcolor + {\csname\??cl:s:\number\currentcolornesting\endcsname + \global\advance\currentcolornesting\minusone} + %D \macros %D {startcurrentcolor,stopcurrentcolor} -\def\startcurrentcolor{\startcolor[\outercolorname]} -\def\stopcurrentcolor {\stopcolor} +\unexpanded\def\startcurrentcolor{\startcolor[\outercolorname]} +\unexpanded\def\stopcurrentcolor {\stopcolor} %D \macros %D {setupcolor} @@ -188,7 +208,7 @@ \let\colorstyle\empty -\def\setupcolor +\unexpanded\def\setupcolor {\dosingleargument\dosetupcolor} \def\dosetupcolor[#1]% @@ -213,7 +233,7 @@ %D %D This command numbers to names: -\def\definetransparency +\unexpanded\def\definetransparency {\dodoubleargument\dodefinetransparency} \unexpanded\def\setupcolors @@ -221,6 +241,8 @@ \let\showcolormessage\gobblethreearguments +\newtoks\everysetupcolors + \def\dosetupcolors[#1]% some no longer make sense in MkIV {\getparameters[\??cl][#1]% \doifelse\@@clspot\v!yes @@ -248,9 +270,12 @@ {\ifMPspotcolors \showcolormessage\m!colors {9}{\v!mp\v!spot}\MPspotcolorsfalse \fi} {\ifMPspotcolors \else\showcolormessage\m!colors{10}{\v!mp\v!spot}\MPspotcolorstrue \fi}% \preferGRAYfalse + \convertGRAYtrue \processaction [\@@clconversion] [ \v!yes=>\preferGRAYtrue, + \v!no=>\convertGRAYfalse, + \v!never=>\convertGRAYfalse, \v!always=>\preferGRAYtrue\RGBsupportedfalse\CMYKsupportedfalse]% \ifRGBsupported \converttoGRAYfalse @@ -273,7 +298,7 @@ {\incolorfalse\forcegrayMPcolorstrue}% {\ifincolor\else\showcolormessage\m!colors1\colorstyle\fi\incolortrue\let\@@clstate\v!start}% \dosetupcolormodel - \initializemaintextcolor} + \the\everysetupcolors} %D In this documentation we will not go into too much details %D on palets. Curious users can find more information on this @@ -340,7 +365,7 @@ %D This saves us some typing in for instance the modules that %D deal with pretty verbatim typesetting. -\def\definepalet +\unexpanded\def\definepalet {\dodoubleargument\dodefinepalet} \def\dodefinepalet[#1][#2]% @@ -383,7 +408,7 @@ \let\currentpalet\empty -\def\setuppalet +\unexpanded\def\setuppalet {\dosingleempty\dosetuppalet} \def\dosetuppalet[#1]% @@ -447,7 +472,7 @@ %D defining each color separate, it also loads faster and takes %D less bytes. 
-\def\definecolorgroup +\unexpanded\def\definecolorgroup {\dotripleempty\dodefinecolorgroup} \def\dododefinecolorgroupgray [#1][#2:#3]{\definecolor [#1:\the\colorcount][s=#2]} @@ -689,32 +714,43 @@ \ifx\currentcolormodel\undefined \newcount\currentcolormodel \fi -\def\setcolormodel#1% - {\showcolormessage\m!colors1{#1}% - \currentcolormodel\ctxlua{tex.print(colors.setmodel('#1',\ifweightGRAY true\else false\fi))}% - \dosetattribute{colormodel}{\the\currentcolormodel}} +% \def\setcolormodel#1% +% {\showcolormessage\m!colors1{#1}% +% \dosetcolormodel{#1}} + +\def\dosetcolormodel#1% no message + {\currentcolormodel\ctxlua{tex.print(colors.setmodel('#1',\ifweightGRAY true\else false\fi))}% + \attribute\colormodelattribute\currentcolormodel} + +\dosetcolormodel{all} -\setcolormodel{all} \def\dosetupcolormodel {\ifincolor \ifRGBsupported \ifCMYKsupported - \setcolormodel{all}% + \dosetcolormodel{all}% \else - \setcolormodel{rgb}% + \dosetcolormodel{rgb}% \fi \else \ifCMYKsupported - \setcolormodel{cmyk}% + \dosetcolormodel{cmyk}% \else - \setcolormodel{gray}% + \ifconvertGRAY + \dosetcolormodel{gray}% + \else + \dosetcolormodel{none}% + \fi \fi \fi \else - \setcolormodel{gray}% - \fi - }%\aftergroup\dosetupcolormodel} + \ifconvertGRAY + \dosetcolormodel{gray}% + \else + \dosetcolormodel{none}% + \fi + \fi} \appendtoks \dosetupcolormodel @@ -749,7 +785,7 @@ \def\doactivatecolor {\ifproductionrun - \ctxlua{colors.enabled=true transparencies.enabled=true}% not that efficient but at least robust + \ctxlua{colors.enable() transparencies.enable()}% not that efficient but at least robust \let\doactivatecolor\normaldoactivatecolor \expandafter\doactivatecolor \else @@ -802,7 +838,7 @@ \ifx\@@rastervalue\empty \let\@@rastervalue\@@rsscreen \fi - \dosetattribute\s!color{\ctxlua{tex.sprint(colors.definesimplegray("_raster_",\@@rastervalue))}}} + \attribute\colorattribute\ctxlua{tex.sprint(colors.definesimplegray("_raster_",\@@rastervalue))}\relax} \def\dodefinefastcolor[#1][#2]% still not fast but ok {\ctxlua{colors.defineprocesscolor("#1","#2",false,\iffreezecolors true\else false\fi)}% @@ -874,19 +910,14 @@ \def\defaulttextcolor {black} \def\@@themaintextcolor{themaintextcolor} -\appendtoks\deactivatecolor\to\everybeforeoutput - -\def\startregistercolor[#1]% probably obsolete - {\doifelsenothing{#1} - {\let\stopregistercolor\relax} - {\edef\stopregistercolor - {\dosetattribute\s!color {\dogetattribute\s!color }% - \dosetattribute\s!transparency{\dogetattribute\s!transparency}}% - \doactivatecolor{#1}}} +\appendtoks + \deactivatecolor + \ifx\maintextcolor\empty\else\doactivatecolor\maintextcolor\fi +\to \everybeforeoutput \def\registermaintextcolor{\ctxlua{colors.main = \thecolorattribute\maintextcolor}} -\def\starttextcolor[#1]% +\unexpanded\def\starttextcolor[#1]% {\doifsomething{#1} {\definecolor[\@@themaintextcolor][#1]% \let\maintextcolor\@@themaintextcolor @@ -904,6 +935,7 @@ \registermaintextcolor} \appendtoks \initializemaintextcolor \to \everyjob +\appendtoks \initializemaintextcolor \to \everysetupcolors \def\dodefinepaletcolor#1#2#3% {\doifassignmentelse{#3}% \definepalet[test][xx={y=.4}] @@ -927,6 +959,23 @@ \def\doinheritta#1{\csname(ta:\ifcsname(ta:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ta:#1)\endcsname#1\fi\fi)\endcsname} \def\doinheritts#1{\csname(ts:\ifcsname(ts:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ts:#1)\endcsname#1\fi\fi)\endcsname} +%D Low level defs: + +\def\colordefalc#1#2{\setevalue{(ca:#1)}{#2}\setvalue {(cs:#1)}{\attribute\colorattribute #2 }} 
+\def\colordefagc#1#2{\setxvalue{(ca:#1)}{#2}\setvalue {(cs:#1)}{\attribute\colorattribute #2 }} +\def\colordefalt#1#2{\setevalue{(ta:#1)}{#2}\setvalue {(ts:#1)}{\attribute\transparencyattribute#2 }} +\def\colordefagt#1#2{\setxvalue{(ta:#1)}{#2}\setgvalue{(ts:#1)}{\attribute\transparencyattribute#2 }} + +\def\colordefflc#1#2{\setvalue {(ca:#1)}{\doinheritca{#2}}\setvalue {(cs:#1)}{\doinheritcs{#2}}} +\def\colordeffgc#1#2{\setgvalue{(ca:#1)}{\doinheritca{#2}}\setvalue {(cs:#1)}{\doinheritcs{#2}}} +\def\colordefflt#1#2{\setvalue {(ta:#1)}{\doinheritta{#2}}\setvalue {(ts:#1)}{\doinheritts{#2}}} +\def\colordeffgt#1#2{\setgvalue{(ta:#1)}{\doinheritta{#2}}\setgvalue{(ts:#1)}{\doinheritts{#2}}} + +\def\colordefrlc #1{\localundefine {(ca:#1)}\localundefine {(cs:#1)}} +\def\colordefrgc #1{\globalundefine{(ca:#1)}\globalundefine{(cs:#1)}} +\def\colordefrlt #1{\localundefine {(ta:#1)}\localundefine {(ts:#1)}} +\def\colordefrgt #1{\globalundefine{(ta:#1)}\globalundefine{(ts:#1)}} + %D \macros %D {colorvalue, grayvalue} %D diff --git a/Master/texmf-dist/tex/context/base/colo-new.mkii b/Master/texmf-dist/tex/context/base/colo-new.mkii deleted file mode 100644 index 5413d7a1208..00000000000 --- a/Master/texmf-dist/tex/context/base/colo-new.mkii +++ /dev/null @@ -1,1857 +0,0 @@ -%D \module -%D [ file=colo-ini, -%D version=1997.04.01, -%D title=\CONTEXT\ Color Macros, -%D subtitle=Initialization, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA / Hans Hagen \& Ton Otten}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\unprotect - -\chardef\colorversion=1 - -%D Color support is not present in \TEX. Colorful output can -%D however be accomplished by using specials. This also means -%D that this support depends on the \DVI\ driver used. At the -%D moment this module was written, still no decent standard on -%D color specials has been agreed upon. We therefore decided to -%D implement a mechanism that is as independant as possible of -%D drivers. -%D -%D Color support shares with fonts that is must be implemented -%D in a way that permits processing of individual \DVI\ pages. -%D Furthermore it should honour grouping. The first condition -%D forces us to use a scheme that keeps track of colors at -%D page boundaries. This can be done by means of \TEX's -%D marking mechanism (\type{\mark}). -%D -%D When building pages, \TEX\ periodically looks at the -%D accumulated typeset contents and breaks the page when -%D suitable. At that moment, control is transfered to the -%D output routine. This routine takes care of building the -%D pagebody and for instance adds headers and footers. The page -%D can be broken in the middle of some colored text, but -%D headers and footers are often in black upon white or -%D background. If colors are applied there, they definitely -%D are used local, which means that they don't cross page -%D borders. -%D -%D Boxes are handled as a whole, which means that when we -%D apply colors inside a box, those colors don't cross page -%D boundaries, unless of course boxes are split or unboxed. -%D Especially in interactive texts, colors are often used in -%D such a local way: in boxes (buttons and navigational tools) -%D or in the pagebody (backgrounds). -%D -%D So we can distinguish local colors, that don't cross -%D pages from global colors, of which we can end many pages -%D later. The color macros will treat both types in a different -%D way, thus gaining some speed. 
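A related sketch, not part of the patch: the colo-ini.lua hunk earlier in this commit replaces the old \setevalue/\setxvalue string templates with calls such as context.colordefalc(name,ca), which in turn expand the low-level \colordefalc, \colordefagc, ... macros added just above. Roughly speaking, such a call pipes \colordefalc{name}{ca} back to the TeX input stream; the helper name callmacro below is invented for illustration only, and the real context namespace also takes care of catcodes and argument serialization.

local format    = string.format
local texsprint = tex.sprint          -- available inside LuaTeX only

local function callmacro(name,...)    -- hypothetical stand-in for context.<name>(...)
    local t = { ... }
    for i=1,#t do
        t[i] = format("{%s}",tostring(t[i]))
    end
    texsprint(tex.ctxcatcodes,format("\\%s%s",name,table.concat(t)))
end

-- in spirit: callmacro("colordefalc","mycolor",123)
--       ==>  \colordefalc{mycolor}{123}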
-%D -%D This module also deals with gray scales. Because similar -%D colors can end up in the same gray scale when printed in -%D black and white, we also implement a palet system that deals -%D with these matters. Because of fundamental differences -%D between color and gray scale printing, in \CONTEXT\ we also -%D differ between these. For historic reasons |<|we first -%D implemented gray scales using patterns of tiny periods|>| -%D and therefore called them {\em rasters}. So don't be -%D surprised if this term shows up. - -%D \macros -%D {definecolor} -%D -%D We will enable users to specify colors in \cap{RGB} and -%D \cap{CMYK} color spaces or gray scales using -%D -%D \showsetup{definecolor} -%D -%D For example: -%D -%D \starttyping -%D \definecolor [SomeKindOfRed] [r=.8,g=.05,b=.05] -%D \stoptyping -%D -%D Such color specifications are saved in a macro in the -%D following way: -%D -%D \starttyping -%D \setvalue{\??cr name}{R:r:g:b} -%D \setvalue{\??cr name}{C:c:m:y:k} -%D \setvalue{\??cr name}{S:s} -%D \stoptyping -%D -%D Gray scales are specified with the \type{s} parameter, -%D where the \type {s} is derived from {\em screen}. -%D -%D Starting with \PDF\ 1.4 (2001) \CONTEXT\ supports -%D transparent colors. The transparency factor is represented -%D by a \type {t} and the transparency method by an \type {a} -%D (alternative). Later we will implement more control -%D (probably by symbolic methods. So, currently the data is -%D stored as follows: -%D -%D \starttyping -%D \setvalue{\??cr name}{R:r:g:b:a:t} -%D \setvalue{\??cr name}{C:c:m:y:k:a:t} -%D \setvalue{\??cr name}{S:s:a:t} -%D \stoptyping - -% r g b : rbg -% c m y k : cmyk -% s : gray -% p n d f : spot -% h : hexadecimal -% t a : transparency -% e : equivalent (spotcolors) - -\def\@@cl@@z{0} -\def\@@cl@@o{1} - -\def\@@resetcolorparameters - {\let\@@cl@@r\@@cl@@z \let\@@cl@@g\@@cl@@z \let\@@cl@@b\@@cl@@z - \let\@@cl@@c\@@cl@@z \let\@@cl@@m\@@cl@@z \let\@@cl@@y\@@cl@@z \let\@@cl@@k\@@cl@@z - \let\@@cl@@s\@@cl@@z - \let\@@cl@@p\@@cl@@o \let\@@cl@@n\empty \let\@@cl@@d\empty \let\@@cl@@f\@@cl@@o - \let\@@cl@@h\empty - \let\@@cl@@e\empty - \let\@@cl@@t\@@cl@@z \let\@@cl@@a\@@cl@@z} - -\@@resetcolorparameters - -\def\@@cl@@A{\@@cl@@a} % a hook for symbolic conversion, see below - -%D Handling a few nested \type{\cs}'s is no problem (\type -%D {\@EA\@EAEAEA\@EA}) but we need a full expansion, so I -%D tried one of the fully expandable primitives using a sort -%D of delimited thing. I tried \type {\number} first, but this -%D does not work, but \type {\romannumeral} does. Actually, -%D \type{\romannumeral0} returns nothing, so it's a perfect -%D candidate for this kind of hackery. This reminds me that I -%D have to look into David Kastrup's Euro\TeX\ 2002 article -%D because he is using \type {\romannumeral} for loops -%D (repetitive \quote {m} stuff). 
- -% \def\x{\y}\def\y{\z}\def\z{0:1:1:1} -% -% \def\bla #1:#2:#3\end{} -% -% \@EA\bla\romannumeral\x\end - -\def\colorXpattern{0S:\@@cl@@z:\@@cl@@z:\@@cl@@z} -\def\colorZpattern{0S:\@@cl@@z:\@@cl@@A:\@@cl@@t} -\def\colorSpattern{0S:\@@cl@@s:\@@cl@@A:\@@cl@@t} -\def\colorCpattern{0C:\@@cl@@c:\@@cl@@m:\@@cl@@y:\@@cl@@k:\@@cl@@A:\@@cl@@t} -\def\colorRpattern{0R:\@@cl@@r:\@@cl@@g:\@@cl@@b:\@@cl@@A:\@@cl@@t} - -%def\colorPpattern{0P:\@@cl@@n:\@@cl@@p:\@@cl@@A:\@@cl@@t} - -\def\colorPpattern{0P:\@@cl@@n:\@@cl@@f:\@@cl@@d:\@@cl@@p:\@@cl@@A:\@@cl@@t} - -%D The extra 0 catches empty colors specs (needed for the -%D \type {\MPcolor} and \type {\PDFcolor} conversion (\type -%D {\@@cr} equals \type {\relax}!). - -\def\handlecolorwith#1{\@EA#1\romannumeral0} - -%D Next comes the main definition macro. - -\def\dodefinecolor {\dododefinecolor\relax \setvalue \setevalue1} -\def\dodefineglobalcolor{\dododefinecolor\doglobal\setgvalue\setxvalue1} -\def\dodefinenamedcolor {\dododefinecolor\doglobal\setvalue \setevalue0} - -\def\dododefinecolor#1#2#3#4[#5][#6]% #2==set(g)value #3==set[e|x]value - {#1\addtocommalist{#5}\colorlist % optional - \doifassignmentelse{#6} - {\@@resetcolorparameters - \getparameters[\??cl @@][#6]% - \ifx\@@cl@@h\empty - \doifelse{\@@cl@@r\@@cl@@g\@@cl@@b}{\@@cl@@z\@@cl@@z\@@cl@@z} - {\doifelse{\@@cl@@c\@@cl@@m\@@cl@@y\@@cl@@k}{\@@cl@@z\@@cl@@z\@@cl@@z\@@cl@@z} - {\doifelse\@@cl@@s\@@cl@@z - {\showmessage\m!colors8{{[#6]},#5}% - #3{\??cr#5}{\colorZpattern}} - {#3{\??cr#5}{\colorSpattern}}} - {#3{\??cr#5}{\colorCpattern}}} - {#3{\??cr#5}{\colorRpattern}}% - \else - \setxvalue{\??cr#5}{\colorHpattern}% - \fi - % new: e=external spot color name - \ifx\@@cl@@e\empty \else - \doregisterspotcolorname{#5}\@@cl@@e - \fi} - {\doifelsenothing\currentpalet - \donefalse - {\doifdefinedelse{\??cr\currentpalet#6}\donetrue\donefalse}% - \ifdone - \doifnot{#5}{#6} - {#2{\??cr#5}{\paletcolorspec{#6}}}% - \else - \doifdefinedelse{\??cr#6} - {\doifelse{#5}{#6} - {% this way we can freeze \definecolor[somecolor][somecolor] - % and still prevent cyclic definitions - \iffreezecolors#3{\??cr#5}{\csname\??cr#6\endcsname}\fi} - {\iffreezecolors\@EA#3\else\@EA#2\fi{\??cr#5}{\csname\??cr#6\endcsname}}} - {\showmessage\m!colors3{#5 (def)}}% - \fi}% - \ifcase#4\or - \unexpanded#2{#5}{\switchtocolor[#5]}% \unexpanded toegevoegd - \fi} - -\def\paletcolorspec#1% - {\csname\??cr\currentpalet#1\endcsname} - -%D Hex color support is not enabled by default. You need to say \type -%D {\setupcolor [hex]} to get this working. - -\ifx\colorHpattern\undefined \let\colorHpattern\colorZpattern \fi - -%D New and experimental. 
- -\def\dodefinespotcolor[#1][#2][#3]% todo: always global - {\doifnot{#1}{#2} - {\@@resetcolorparameters - \doglobal\addtocommalist{#1}\colorlist % optional - \edef\@@cl@@n{#2}% - \getparameters[\??cl @@][#3]% - \doifnothing\@@cl@@p{\let\@@cl@@p\!!plusone}% - \ifx\@@cl@@e\empty \else - \doregisterspotcolorname{#2}\@@cl@@e - \fi - \doglobal\addtocommalist{#2}\allspotcolors - \setxvalue{\??cr#1}{\colorPpattern}% was \setevalue - \setgvalue{#1}{\switchtocolor[#1]}}} % was \setvalue - -\def\registerusedspotcolors - {\ifx\allspotcolors\empty \else - \bgroup - \let\usedspotcolors\empty - \def\docommand##1% - {\doifdefined{\??cs##1}{\addtocommalist{##1}\usedspotcolors}}% - \processcommacommand[\allspotcolors]\docommand - \savecurrentvalue\usedspotcolors\usedspotcolors - \egroup - \fi} - -\def\registerusedcolorchannels - {\bgroup - \doifdefinedelse{\??cs c} - {\def\usedcolorchannels{c,m,y,k}}% - {\let\usedcolorchannels\empty}% - \doifdefined{\??cs r} - {\addtocommalist{r,g,b}\usedcolorchannels}% - \doifdefined{\??cs s} - {\ExpandBothAfter\doifnotinset{k}\usedcolorchannels - {\addtocommalist{s}\usedcolorchannels}}% - \savecurrentvalue\usedcolorchannels\usedcolorchannels - \egroup} - -\prependtoks - \registerusedspotcolors - \registerusedcolorchannels -\to \everylastshipout - -\def\registerusedspotcolor#1% - {\global\@EA\chardef\csname\??cs#1\endcsname\zerocount} - -%D On top of spotcolors, we define multitone colors. You'd better know -%D what you're doing because invalid definitions will lead to invalid -%D documents (i.e.\ resources). - -% \definecolor [darkblue] [c=.5,m=.5] -% \definecolor [darkyellow] [y=.5] -% -% \definemultitonecolor [whatever] [darkblue=.5,darkyellow=.5] [c=.25,m=.25,y=.25] [a=1,t=.5] -% \definemultitonecolor [another] [darkblue=.5,darkyellow=.5] [c=.25,m=.25,y=.25] - -\def\dodefinemultitonecolor[#1][#2][#3][#4]% - {\let\@@cl@@cl@@D\empty % n's - \let\@@cl@@cl@@P\empty % p's - \let\@@cl@@cl@@N\empty % name - \scratchcounter\zerocount - \processcommacommand[#2]\dododefinemultitonecolor - \bgroup - \lccode`\.=`\_% - \lccode`\,=`\_% - \lccode`\:=`\_% - \lccode`\;=`\_% - \lccode`\+=`\_% - \lccode`\-=`\_% - \lccode`\*=`\_% - \lccode`\/=`\_% -% \lccode`\_=`\_% - % not needed, other attribute in driver: - % - % \@@resetcolorparameters - % \getparameters[#4]% - % \ifx\@@cl@@t\@@cl@@z\else - % \edef\@@cl@@cl@@N{\@@cl@@cl@@N_\@@cl@@t_\@@cl@@a}% - % \fi - \lowercase\@EA{\@EA\xdef\@EA\@@cleancolor\@EA{\@@cl@@cl@@N}}% - \egroup - \setxvalue{\??cl\@@cleancolor\s!check}{\noexpand\docheckmultitonecolor{\@@cl@@cl@@D}}% - \expanded{\defineglobalcolor[\@@cleancolor][#3,#4]}% - \expanded{\definespotcolor[#1][\@@cleancolor][#4,f=\the\scratchcounter,p={\@@cl@@cl@@P},d={\@@cl@@cl@@D}]}} - -\def\docheckmultitonecolor#1% - {\flushatshipout - {\let\checkmultitonecolor\gobbleoneargument - \def\docommand##1{\hbox{\definecolor[\s!dummy-100][##1][p=1]\color[\s!dummy-100]}}% - \processcommalist[#1]\docommand}} - -\def\checkmultitonecolor#1% - {\csname\??cl#1\s!check\endcsname\letgvalue{\??cl#1\s!check}\relax} - -\def\dodefinespotcolor[#1][#2][#3]% todo: always global (REDEFINED) - {\doifnot{#1}{#2} - {\@@resetcolorparameters - \doglobal\addtocommalist{#1}\colorlist % optional - \edef\@@cl@@n{#2}% - \getparameters[\??cl @@][#3]% - \doifnothing \@@cl@@p{\let\@@cl@@p\!!plusone}% - \doifsomething\@@cl@@e{\doregisterspotcolorname{#2}\@@cl@@e}% - \doglobal\addtocommalist{#2}\allspotcolors - \setxvalue{\??cr#1}{\colorPpattern}% was \setevalue - \setgvalue{#1}{\switchtocolor[#1]}}}% was \setvalue - 
-\def\dododefinemultitonecolor#1% - {\advance\scratchcounter\plusone - \splitstring#1\at=\to\!!stringa\and\!!stringb - \ifx\@@cl@@cl@@D\empty - \let\@@cl@@cl@@D\!!stringa - \let\@@cl@@cl@@P\!!stringb - \normalizecolor\!!stringb - \edef\@@cl@@cl@@N{\!!stringa_\!!stringb}% - \else - \edef\@@cl@@cl@@D{\@@cl@@cl@@D,\!!stringa}% - \edef\@@cl@@cl@@P{\@@cl@@cl@@P,\!!stringb}% - \normalizecolor\!!stringb - \edef\@@cl@@cl@@N{\@@cl@@cl@@N_\!!stringa_\!!stringb}% - \fi} - -% \def\dododefinemultitonecolor#1% a/b safe -% {\advance\scratchcounter\plusone -% \splitstring#1\at=\to\@@cl@@one\and\@@cl@@two -% \ifx\@@cl@@cl@@D\empty -% \let\@@cl@@cl@@D\@@cl@@one -% \let\@@cl@@cl@@P\@@cl@@two -% \normalizecolor\@@cl@@two -% \edef\@@cl@@cl@@N{\@@cl@@one_\@@cl@@two}% -% \else -% \edef\@@cl@@cl@@D{\@@cl@@cl@@D,\@@cl@@one}% -% \edef\@@cl@@cl@@P{\@@cl@@cl@@P,\@@cl@@two}% -% \normalizecolor\@@cl@@two -% \edef\@@cl@@cl@@N{\@@cl@@cl@@N_\@@cl@@one_\@@cl@@two}% -% \fi} - -%D The names of colors are stored in a comma separated list -%D only for the purpose of showing them with \type {\showcolor}. -%D -%D \startbuffer -%D \definecolor [SomeKindOfRed] [r=.8,g=.05,b=.05] -%D \stopbuffer -%D -%D \typebuffer -%D \getbuffer -%D -%D This color shows up as \color [SomeKindOfRed] {some kind -%D of red}. -%D -%D \starttyping -%D \setupcolors[state=start] -%D -%D \definecolor[mygreen][green] -%D \definecolor[green][g=.5] -%D -%D \startcolor[mygreen]test\stopcolor -%D -%D \setupcolors[expansion=no] -%D -%D \definecolor[mygreen][green] -%D \definecolor[green][g=.5] -%D -%D \startcolor[mygreen]test\stopcolor -%D \stoptyping - -%D \macros -%D {startcolormode,stopcolormode,permitcolormode} -%D -%D We use \type{\stopcolormode} to reset the color in -%D whatever color space and do so by calling the corresponding -%D special. Both commands can be used for fast color -%D switching, like in colored verbatim, - -\newif\ifpermitcolormode \permitcolormodetrue - -\def\dowithcolor#1#2% #1=\action #2=color - {\ifincolor\ifpermitcolormode - \ifcsname\??cr\currentpalet#2\endcsname - \handlecolorwith#1\csname\??cr\currentpalet#2\endcsname\od - \else\ifcsname\??cr#2\endcsname - \handlecolorwith#1\csname\??cr#2\endcsname\od - \fi\fi - \fi\fi} - -\def\startcolormode % includes \ifincolor\ifpermitcolormode - {%\dostoptransparency % needed for: {test \trans test \notrans test} - \conditionalstoptransparency - \dowithcolor\execcolorRCSP} - -\def\stopcolormode - {\ifincolor\ifpermitcolormode - \supportedstoptransparency - \dostopcolormode - \fi\fi} - -\def\restorecolormode - {\ifincolor\ifpermitcolormode - \supportedstoptransparency - \dostopcolormode - \ifx\maintextcolor\empty \else - \startcolormode\maintextcolor - \fi - \fi\fi} - -%D Color modes are entered using the next set of commands. -%D The \type{\stop} alternatives are implemented in a way -%D that permits non||grouped use. -%D -%D The, for this module redundant, check if we are in color -%D mode is needed when we use these macros in other modules. 
- -\def\execcolorRCSP#1:% - {\csname execcolor#1\endcsname} - -\def\execcolorR - {\iffilterspotcolor - \@EA\noexeccolorR - \else - \@EA\doexeccolorR - \fi} - -\def\execcolorC - {\iffilterspotcolor - \@EA\noexeccolorC - \else - \@EA\doexeccolorC - \fi} - -\def\execcolorS - {\iffilterspotcolor - \@EA\noexeccolorS - \else - \@EA\doexeccolorS - \fi} - -\def\execcolorP - {\iffilterspotcolor - \@EA\doexeccolorPP - \else\ifcase\currentcolorchannel - \@EAEAEA\doexeccolorP - \else - \@EAEAEA\noexeccolorP - \fi\fi} - -\def\doexeccolorR#1:#2:#3:% - {\edef\@@cl@@r{#1}\edef\@@cl@@g{#2}\edef\@@cl@@b{#3}% - \ifpreferGRAY\ifx\@@cl@@r\@@cr@@g\ifx\@@cl@@r\@@cl@@b - \GRAYpreferedtrue - \fi\fi\fi - \ifincolor\else\RGBsupportedfalse\CMYKsupportedfalse\fi - \ifGRAYprefered - \registercolorchannel\c!s - \let\@@cl@@s\@@cl@@r - \normalizeGRAY - \doexeccolorgray - \else\ifRGBsupported - \registercolorchannel\c!r - \normalizeRGB - \doexeccolorrgb - \else\ifCMYKsupported - \registercolorchannel\c!c - \convertRGBtoCMYK\@@cl@@r\@@cl@@g\@@cl@@b - \normalizeCMYK - \doexeccolorcmyk - \else - \registercolorchannel\c!s - \convertRGBtoGRAY\@@cl@@r\@@cl@@g\@@cl@@b - \normalizeGRAY - \doexeccolorgray - \fi\fi\fi - \exectransparency} - -\def\doexeccolorC#1:#2:#3:#4:% - {\edef\@@cl@@c{#1}\edef\@@cl@@m{#2}\edef\@@cl@@y{#3}\edef\@@cl@@k{#4}% - \ifpreferGRAY\ifx\@@cl@@k\@@cl@@z\ifx\@@cl@@c\@@cr@@m\ifx\@@cl@@c\@@cl@@y - \GRAYpreferedtrue - \fi\fi\fi\fi - \ifincolor\else\RGBsupportedfalse\CMYKsupportedfalse\fi - \ifGRAYprefered - \registercolorchannel\c!s - \let\@@cl@@s\@@cl@@c - \normalizeGRAY - \doexeccolorgray - \else\ifCMYKsupported - \registercolorchannel\c!c - \ifreduceCMYK - \convertCMYKtoCMY\@@cl@@c\@@cl@@m\@@cl@@y\@@cl@@k - \fi - \normalizeCMYK - \doexeccolorcmyk - \else\ifRGBsupported - \registercolorchannel\c!r - \convertCMYKtoRGB\@@cl@@c\@@cl@@m\@@cl@@y\@@cl@@k - \normalizeRGB - \doexeccolorrgb - \else - \registercolorchannel\c!s - \convertCMYKtoGRAY\@@cl@@c\@@cl@@m\@@cl@@y\@@cl@@k - \normalizeGRAY - \doexeccolorgray - \fi\fi\fi - \exectransparency} - -\def\doexeccolorS#1:% - {\edef\@@cl@@s{#1}% - \registercolorchannel\c!s - \normalizeGRAY - \doexeccolorgray - \exectransparency} - -% \def\doexeccolorP#1:#2:% -% {\edef\@@cl@@n{#1}% -% \edef\@@cl@@p{#2}% -% \registerusedspotcolor\@@cl@@n -% \ifSPOTsupported -% \dowithcolor\registerspotcolor\@@cl@@n -% \dostartspotcolormode\@@cl@@n\@@cl@@p -% \else -% \doingspotcolortrue -% \let\spotcolorfactor\@@cl@@p -% \factorizecolortrue % using counter and array -% \dowithcolor\execcolorRCSP\@@cl@@n -% \factorizecolorfalse -% \let\spotcolorfactor\@@cl@@o -% \doingspotcolorfalse -% \fi -% \exectransparency} - -\def\doexeccolorP#1:#2:#3:#4:% - {\edef\@@cl@@n{#1}% name - \edef\@@cl@@f{#2}% fractions - \edef\@@cl@@d{#3}% definitions - \edef\@@cl@@p{#4}% - \ifx\@@cl@@d\empty - \let\@@cl@@d\@@cl@@n - \fi - \registerusedspotcolor\@@cl@@n - \ifSPOTsupported - \checkmultitonecolor\@@cl@@n - \dowithcolor\registerspotcolor\@@cl@@n - \dostartspotcolormode\@@cl@@n\@@cl@@p - \else - \doingspotcolortrue - \normalizespotcolor\@@cl@@p - \let\spotcolorfactor\@@cl@@p - \factorizecolortrue % using counter and array - \dowithcolor\execcolorRCSP\@@cl@@n - \factorizecolorfalse - \let\spotcolorfactor\@@cl@@o - \doingspotcolorfalse - \fi - \exectransparency} - -\def\doexeccolorPindex#1:#2:#3:#4:% - {\edef\@@cl@@n{#1}% - \edef\@@cl@@f{#2}% - \edef\@@cl@@d{#3}% - \edef\@@cl@@p{#4}% - \ifx\@@cl@@d\empty - \let\@@cl@@d\@@cl@@n - \fi - \ifSPOTsupported - \checkmultitonecolor\@@cl@@n - 
\dowithcolor\registerindexcolor\@@cl@@n - \fi - \noexectransparency} - -\def\doexeccolorPP#1:#2:% - {\edef\@@cl@@n{#1}% - \edef\@@cl@@p{#2}% - \registerusedspotcolor\@@cl@@n - \ifx\@@cl@@n\currentspotcolor - \normalizeSPOT - \dostartgraycolormode\@@cl@@p % was spotcolormode - \else - \dovidecolor\@@cl@@p\@@cl@@o - \fi - \exectransparency} - -\def\doexeccolorrgb - {\ifcase\currentcolorchannel - \dostartrgbcolormode\@@cl@@r\@@cl@@g\@@cl@@b - \or \or \or \or - \or \dostartgraycolormode\@@cl@@r - \or \dostartgraycolormode\@@cl@@g - \or \dostartgraycolormode\@@cl@@b - \fi} - -\def\doexeccolorcmyk - {\ifcase\currentcolorchannel - \dostartcmykcolormode\@@cl@@c\@@cl@@m\@@cl@@y\@@cl@@k - \or \negatecolorcomponent\@@cl@@c\dostartgraycolormode\@@cl@@c - \or \negatecolorcomponent\@@cl@@m\dostartgraycolormode\@@cl@@m - \or \negatecolorcomponent\@@cl@@y\dostartgraycolormode\@@cl@@y - \or \negatecolorcomponent\@@cl@@k\dostartgraycolormode\@@cl@@k - \fi} - -\def\doexeccolorgray - {\ifcase\currentcolorchannel - \dostartgraycolormode\@@cl@@s - \or \or \or - \or \dostartgraycolormode\@@cl@@s - \or \or \or - \or \dostartgraycolormode\@@cl@@s - \fi} - -%D When filtering colors, we need to either erase -%D the background, or ignore the foreground. - -% \newif\ifhidesplitcolor \hidesplitcolortrue -% -% \def\noexeccolor#1\od -% {\dostartgraycolormode\@@cl@@o} -% -% \let\noexeccolorS\noexeccolor -% \let\noexeccolorP\noexeccolor - -%D Well, here comes some real trickery. When we have the 100\% -%D spot color or black color, we don't want to erase the -%D background. So, instead we hide the content by giving it -%D zero transparency. - -% todo : #1#2#3 met #2 > of < and #3 een threshold - -\def\dohidecolor#1#2% - {\ifhidesplitcolor - \ifx#1#2% - \dostartgraycolormode\@@cl@@o - \else - \doregisternonecolor - \dostartnonecolormode - \fi - \else - \dostartgraycolormode\@@cl@@o - \fi} - -\def\dovidecolor#1#2% - {\ifhidesplitcolor - \ifx#1#2% - \doregisternonecolor - \dostartnonecolormode - \else - \dostartgraycolormode\@@cl@@o - \fi - \else - \dostartgraycolormode\@@cl@@o - \fi} - -% \def\fullytransparentcolor % fails on floats -% {\dostartgraycolormode\@@cl@@o % better than z -% %\global\@EA\chardef\csname\@@currenttransparent\endcsname\plusone -% %\global\intransparenttrue -% \dostarttransparency10} - -\def\noexeccolorR#1:#2:#3:#4\od - {\edef\@@cl@@r{#1}\edef\@@cl@@g{#2}\edef\@@cl@@b{#3}% - \dohidecolor\@@cl@@s\@@cl@@o} - -\def\noexeccolorC#1:#2:#3:#4:#5\od - {\edef\@@cl@@c{#1}\edef\@@cl@@m{#2}\edef\@@cl@@y{#3}\edef\@@cl@@k{#4}% - \dohidecolor\@@cl@@s\@@cl@@o} - -\def\noexeccolorS#1:#2\od - {\edef\@@cl@@s{#1}% - \dohidecolor\@@cl@@s\@@cl@@o} - -\def\noexeccolorP#1:#2:#3:#4:#5\od - {\edef\@@cl@@p{#4}% - \dohidecolor\@@cl@@p\@@cl@@z} - -%D For the sake of postprocessing (i.e.\ color separation) -%D we can normalize colors, which comes down to giving equal -%D values an equal accuracy and format. This feature is -%D turned off by default due to a speed penalty. This macro -%D also handles spot color percentages. 
- -\newif\iffactorizecolor -\newif\ifnormalizecolor - -\def\spotcolorfactor{1} - -% \def\normalizecolor#1% -% {\colordimen#1\thousandpoint -% \colordimen\spotcolorfactor\colordimen -% \colorcount\colordimen -% \advance\colorcount \medcard -% \divide\colorcount \maxcard -% \edef#1{\realcolorvalue\colorcount}} - -\def\normalizecolor#1% - {\colorcount\numexpr(\dimexpr\spotcolorfactor\dimexpr#1\thousandpoint\relax\relax+\medcard)/\maxcard\relax - \edef#1{\realcolorvalue\colorcount}} - -% \def\normalizespotcolor#1% -% {\colordimen-#1\thousandpoint -% \advance\colordimen\thousandpoint -% \colorcount\colordimen -% \advance\colorcount \medcard -% \divide\colorcount \maxcard -% \edef#1{\realcolorvalue\colorcount}} - -\def\normalizespotcolor#1% - {\colorcount\numexpr(\dimexpr\thousandpoint-#1\thousandpoint\relax+\medcard)/\maxcard\relax - \edef#1{\realcolorvalue\colorcount}} - -\def\donormalizeRGB - {\normalizecolor\@@cl@@r - \normalizecolor\@@cl@@g - \normalizecolor\@@cl@@b} - -\def\normalizeRGB - {\ifnormalizecolor - \donormalizeRGB - \else\iffactorizecolor - \donormalizeRGB - \fi\fi} - -\def\donormalizeCMYK - {\normalizecolor\@@cl@@c - \normalizecolor\@@cl@@m - \normalizecolor\@@cl@@y - \normalizecolor\@@cl@@k} - -\def\normalizeCMYK - {\ifnormalizecolor - \donormalizeCMYK - \else\iffactorizecolor - \donormalizeCMYK - \fi\fi} - -\def\donormalizeGRAY - {\normalizecolor\@@cl@@s} - -\def\normalizeGRAY - {\ifnormalizecolor - \donormalizeGRAY - \else\iffactorizecolor - \donormalizeGRAY - \fi\fi} - -\def\normalizeSPOT - {\normalizespotcolor\@@cl@@p} - -%D We need to register spot colors (i.e.\ resources need to -%D be created. - -\def\registerspotcolor#1:% - {\ifcsname\??cl:\c!p:\@@cl@@n\endcsname - \@EA\dontregisterspotcolor - \else - \letgvalue{\??cl:\c!p:\@@cl@@n}\empty - %\@EA\@EA\csname registerspotcolor#1\endcsname - \csname registerspotcolor#1\@EA\endcsname - \fi} - -% todo: convert to rgb if needed, will will do this in mkiv - -\def\dontregisterspotcolor #1\od{} -\def\registerspotcolorR #1:#2:#3:#4\od{\doregisterrgbspotcolor \@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#1}{#2}{#3}} -\def\registerspotcolorC#1:#2:#3:#4:#5\od{\doregistercmykspotcolor\@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#1}{#2}{#3}{#4}} -\def\registerspotcolorS #1:#2\od{\doregistergrayspotcolor\@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#1}} -\def\registerspotcolorP #1:#2:#3\od{\doregistergrayspotcolor\@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#2}} - -%D Experimental feature: - -% \definecolor [darkblue] [c=1,m=.38,y=0,k=.64] % pantone pms 2965 uncoated m -% \definecolor [darkyellow] [c=0,m=.28,y=1,k=.06] % pantone pms 124 uncoated m -% -% \definecolor [darkblue-50] [darkblue] [p=.5] -% \definecolor [darkyellow-50] [darkyellow] [p=.5] -% \definecolor [darkblue-80] [darkblue] [p=.8] -% \definecolor [darkyellow-80] [darkyellow] [p=.8] -% -% \definecolor [darkblue,darkyellow] [r=.8] -% \definecolor [darkdull-5030] [darkblue,darkyellow] [p={.5,.3}] -% -% \setupcolors[state=start] -% -% \blackrule[width=4cm,height=3cm,color=darkblue-50] -% \blackrule[width=4cm,height=3cm,color=darkblue-80] -% \blackrule[width=4cm,height=3cm,color=darkyellow-50] -% \blackrule[width=4cm,height=3cm,color=darkyellow-80] -% \blackrule[width=4cm,height=3cm,color=darkdull-5030] - -%D Experimental too (special purpose code). 
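Editor's note: the \normalizecolor variant above scales a component to thousandths, rounds it, and reformats it with \realcolorvalue, so equal values end up with equal precision and format. A hedged worked example (editor's arithmetic, not macro output):

\starttyping
% with \normalizecolortrue (or \factorizecolortrue) and factor 1:
% component .5  -> 500 thousandths -> \realcolorvalue{500} -> 0.500
% component .05 ->  50 thousandths -> \realcolorvalue{50}  -> 0.050
\stoptyping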
- -\def\registerindexcolor#1:% - {\ifcsname\??cl:i:\@@cl@@n\endcsname - \@EA\dontregisterindexcolor - \else - \letgvalue{\??cl:i:\@@cl@@n}\empty % signal - \showmessage\m!colors{12}\@@cl@@n - \@EA\@EA\csname registerindexcolor#1\endcsname - \fi} - -\let\dontregisterindexcolor\dontregisterspotcolor - -\def\registerindexcolorR #1:#2:#3:#4\od{\doregisterrgbindexcolor \@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#1}{#2}{#3}} -\def\registerindexcolorC#1:#2:#3:#4:#5\od{\doregistercmykindexcolor\@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#1}{#2}{#3}{#4}} -\def\registerindexcolorS #1:#2\od{\doregistergrayindexcolor\@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#1}} -\def\registerindexcolorP #1:#2:#3\od{\doregistergrayindexcolor\@@cl@@n\@@cl@@f\@@cl@@d\@@cl@@p{#2}} - -\def\predefinecolor[#1]% - {\bgroup - \flushatshipout{\hbox{\localcolortrue\color[#1]{}}}% real ones - \egroup} - -\def\predefineindexcolor[#1]% - {\bgroup - \flushatshipout{\hbox{\localcolortrue\color[#1]{}}}% real ones - \let\doexeccolorP\doexeccolorPindex - \flushatshipout{\hbox{\localcolortrue\color[#1]{}}}% index one - \egroup} - -% \def\checkpredefinedcolor[#1]% -% {\ifcase\internalspotcolorsize{#1}\relax -% \@EA\predefinecolor\or\@EA\predefinecolor\else\@EA\predefineindexcolor -% \fi[#1]} - -\let\checkpredefinedcolor\predefineindexcolor % we need an index in order to negate bitmaps - -%D Transparency is handled similar for all three color modes. We -%D can turn transparency off with the following switch: - -\newif\iftransparencysupported \transparencysupportedtrue % only mkii - -\def\exectransparency - {\iftransparencysupported - \expandafter\doexectransparency - \else - \expandafter\noexectransparency - \fi} - -\def\doexectransparency#1:#2\od - {\ifcase#1\space - \global\intransparentfalse - \else - \global\intransparentfalse - %\dostarttransparency{#1}{#2}% - \supportedstarttransparency{#1}{#2}% - \global\intransparenttrue - \fi} - -\def\noexectransparency#1\od - {} - -%D Experimental: minimize transparency resets. - -\newif\ifintransparent -\newif\ifoptimizetransparency \optimizetransparencytrue % under test - -\let\supportedstoptransparency\relax - -\def\conditionalstoptransparency - {\ifoptimizetransparency - \ifintransparent - \global\intransparentfalse - \supportedstoptransparency - \fi - \else - \supportedstoptransparency - \fi} - -\def\supportedstarttransparency - {\iftransparencysupported - \globallet\supportedstoptransparency\dostoptransparency - \expandafter\dostarttransparency - \else - \expandafter\gobbletwoarguments - \fi} - -%D We now use the \type {\@@cl@@A} hook to implement -%D symbolic names. These are converted into numbers -%D at definition time (which saves runtime). - -\def\dodefinetransparency[#1][#2]% - {\@EA\chardef\csname\??cl-#1\endcsname#2\relax} - -\def\transparencynumber#1% - {\number\executeifdefined{\??cl-#1}{#1}} - -%D Now we hook 'm into the patterns: - -\def\@@cl@@A{\transparencynumber\@@cl@@a} - -%D The next macro can be used to return to the (normal) -%D page color. This macro is used in the same way as -%D \type {\color}. 
- -\def\startregistercolor[#1]% - {\permitcolormodefalse\startcolor[#1]\permitcolormodetrue} - -\def\stopregistercolor - {\permitcolormodefalse\stopcolor\permitcolormodetrue} - -\def\starttextcolor[#1]% - {\doifsomething{#1} - {\bgroup - \def\stoptextcolor % also goes ok with \page after - {\let\maintextcolor\empty % this one because the top of - \stopregistercolor % page sets the color right (side - \egroup}% % effect) - \def\starttextcolor[##1]% - {\bgroup - % \@@themaintextcolor==##1 is catched in \definecolor - \definecolor[\@@themaintextcolor][##1]% - \let\stoptextcolor\egroup}% - \startregistercolor[\@@themaintextcolor]% - \definecolor[\@@themaintextcolor][#1]% - \let\maintextcolor\@@themaintextcolor}} - -\let\stoptextcolor\relax - -%D The following hook permits proper support at the text -%D level. This definition actually belongs in another -%D module. (May need a different \MKIV\ implementation.) - -% \ifx\initializemaintextcolor\undefined - - \let\@@clprevcolor\empty - - \def\initializemaintextcolor - {% saveguard for setting text color to empty after - % it has been set - \doifnothing\@@cltextcolor - {\ifx\@@clprevcolor\empty\else - \let\@@cltextcolor\defaulttextcolor - \fi}% - \doifelsenothing\@@cltextcolor - {\let\maintextcolor\empty} - {\edef\@@clprevcolor{\@@cltextcolor}% - \let\maintextcolor\@@themaintextcolor - \doifelsenothing\@@cltextcolor % another saveguard - {\definecolor[\@@themaintextcolor][\defaulttextcolor]}% - {\definecolor[\@@themaintextcolor][\@@cltextcolor]}% - \doinitializemaintextcolor}} - - \def\doinitializemaintextcolor - {\appendtoks\starttextcolor[\@@themaintextcolor]\to\everystarttext - \appendtoks\stoptextcolor \to\everystoptext - \let\doinitializemaintextcolor\relax} - -% \fi - -\def\localstarttextcolor - {\ifx\maintextcolor\empty - \startcolormode\defaulttextcolor - \else - \startcolormode\maintextcolor - \fi} - -% \def\localstoptextcolor -% {\stopcolormode} -% -% better: - -\def\localstoptextcolor - {\restorecolormode} - -\def\restoretextcolor - {\ifx\maintextcolor\empty - \expandafter\dorestoretextcolor - \else - % obey main text color - \fi} - -\def\dorestoretextcolor - {\color[\defaulttextcolor]} - -%D We use some reserved names for local color components. -%D Consistent use of these scratch variables saves us -%D unneccessary hash entries. -%D -%D \starttyping -%D \@@cl@@r \@@cl@@g \@@cl@@b -%D \@@cl@@c \@@cl@@m \@@cl@@y \@@cl@@k -%D \@@cl@@s -%D \stoptyping -%D -%D We implement several conversion routines. -%D -%D \starttyping -%D \convertRGBtoCMYK {r} {g} {b} -%D \convertRGBtoGRAY {r} {g} {b} -%D \convertCMYKtoRGB {c} {m} {y} {k} -%D \convertCMYKtoGRAY {c} {m} {y} {k} -%D \convertCMYKtoCMY {c} {m} {y} {k} -%D \stoptyping -%D -%D The relation between \cap{Gray}, \cap{RGB} and \cap{CMYK} -%D is: -%D -%D \placeformula[-] -%D \startformula -%D G = .30r + .59g + .11b -%D = 1.0 - \min(1.0,\ .30c + .59m + .11y + k) -%D \stopformula -%D -%D When converting from \cap{CMYK} to \cap{RGB} we use the -%D formula: -%D -%D \placeformula[-] -%D \startformula -%D \eqalign -%D {r &= 1.0 - \min(1.0,\ c+k) \cr -%D g &= 1.0 - \min(1.0,\ m+k) \cr -%D b &= 1.0 - \min(1.0,\ y+k)} -%D \stopformula -%D -%D In the conversion routine the color components are calculated -%D in three digits precision. 
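Editor's note: as a concrete check of the two formulas above, take c=1, m=.38, y=0, k=.64, the CMYK specification used for the experimental darkblue earlier in this file (editor's arithmetic, not macro output):

\starttyping
% r = 1 - min(1, c+k) = 1 - min(1, 1.64) = 0
% g = 1 - min(1, m+k) = 1 - min(1, 1.02) = 0
% b = 1 - min(1, y+k) = 1 - min(1, 0.64) = 0.36
% G = 1 - min(1, .30c + .59m + .11y + k)
%   = 1 - min(1, .30 + .2242 + 0 + .64) = 0
\stoptyping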
- -\def\realcolorvalue#1% - {\ifnum#1>\zerocount % important, first encountered in --modu supp-mpe - \ifnum#1<\plusten 0.00\the#1\else - \ifnum#1<\plushundred 0.0\the#1\else - \ifnum#1<\plusthousand 0.\the#1\else - 1\fi\fi\fi - \else 0\fi} - -% \def\doconvertCMYKtoRGB#1\k#2\to#3% -% {\ifdim#2\points>#1\points% >= problem, repaired 2/12/2002 -% \let#3\@@cl@@z % k >= color -% \else -% \colordimen\onepoint -% \advance\colordimen -#1\points -% \advance\colordimen -#2\points -% \multiply\colordimen \plusthousand -% \colorcount\colordimen -% \advance\colorcount \medcard -% \divide\colorcount \maxcard -% \edef#3{\realcolorvalue\colorcount}% -% \fi} - -\def\doconvertCMYKtoRGB#1\k#2\to#3% - {\colorcount\numexpr(\dimexpr\plusthousand\dimexpr\onepoint-#1\points-#2\points\relax\relax+\medcard)/\maxcard\relax - \ifnum\colorcount>\zeropoint - \edef#3{\realcolorvalue\colorcount}% - \else - \let#3\@@cl@@z - \fi} - -\def\convertCMYKtoRGB#1#2#3#4% - {\doconvertCMYKtoRGB#1\k#4\to\@@cl@@r - \doconvertCMYKtoRGB#2\k#4\to\@@cl@@g - \doconvertCMYKtoRGB#3\k#4\to\@@cl@@b} - -% \def\doconvertRGBtoCMYK#1\to#2% -% {\colordimen#1\points -% \multiply\colordimen \plusthousand -% \colorcount\colordimen -% \advance\colorcount \medcard -% \divide\colorcount \maxcard -% \colorcount-\colorcount -% \advance\colorcount \plusthousand -% \edef#2{\realcolorvalue\colorcount}} - -\def\doconvertRGBtoCMYK#1\to#2% - {\colorcount\numexpr\plusthousand-(\dimexpr\plusthousand\dimexpr#1\points\relax\relax+\medcard)/\maxcard\relax - \edef#2{\realcolorvalue\colorcount}} - -\def\convertRGBtoCMYK#1#2#3% - {\doconvertRGBtoCMYK#1\to\@@cl@@c - \doconvertRGBtoCMYK#2\to\@@cl@@m - \doconvertRGBtoCMYK#3\to\@@cl@@y - \let\@@cl@@k\@@cl@@z} - -%D The following switch is mainly meant for (hidden) -%D documentation purposes. 
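Editor's note: the \realcolorvalue formatter above turns such integer counts (thousandths) back into fixed three-decimal strings. A few sample expansions, as the editor reads its branches:

\starttyping
% \realcolorvalue{7}    -> 0.007
% \realcolorvalue{85}   -> 0.085
% \realcolorvalue{500}  -> 0.500
% \realcolorvalue{1000} -> 1
% \realcolorvalue{0}    -> 0
\stoptyping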
- -\def\nGRAYfactor{333.333} -\def\rGRAYfactor{\ifweightGRAY300\else\nGRAYfactor\fi} -\def\gGRAYfactor{\ifweightGRAY590\else\nGRAYfactor\fi} -\def\bGRAYfactor{\ifweightGRAY110\else\nGRAYfactor\fi} - -% \def\convertRGBtoGRAY#1#2#3% -% {\colordimen#1\points -% \colordimen\rGRAYfactor\colordimen -% \colorcount\colordimen -% \colordimen#2\points -% \colordimen\gGRAYfactor\colordimen -% \advance\colorcount \colordimen -% \colordimen#3\points -% \colordimen\bGRAYfactor\colordimen -% \advance\colorcount \colordimen -% \advance\colorcount \medcard -% \divide\colorcount \maxcard -% \edef\@@cl@@s{\realcolorvalue\colorcount}} - -\def\convertRGBtoGRAY#1#2#3% - {\colorcount\numexpr - (\dimexpr\rGRAYfactor\dimexpr#1\points\relax\relax+ - \dimexpr\gGRAYfactor\dimexpr#2\points\relax\relax+ - \dimexpr\bGRAYfactor\dimexpr#3\points\relax\relax+ - \medcard)/\maxcard - \relax - \edef\@@cl@@s{\realcolorvalue\colorcount}} - -\def\convertCMYKtoGRAY#1#2#3#4% - {\convertCMYKtoRGB{#1}{#2}{#3}{#4}% - \convertRGBtoGRAY\@@cl@@r\@@cl@@g\@@cl@@b} - -% \def\doconvertCMYKtoCMY#1\k#2\to#3% -% {\colordimen#1\points -% \advance\colordimen #2\points\relax -% \ifdim\colordimen>\onepoint -% \colordimen\onepoint -% %\else -% % \colordimen\colordimen -% \fi -% \multiply\colordimen \plusthousand -% \colorcount\colordimen -% \advance\colorcount \medcard -% \divide\colorcount \maxcard -% \edef#3{\realcolorvalue\colorcount}} - -\def\doconvertCMYKtoCMY#1\k#2\to#3% - {\colorcount\numexpr(\dimexpr\plusthousand\dimexpr#1\points+#2\points\relax\relax+\medcard)/\maxcard\relax - \ifnum\colorcount>\plusthousand - \let#3\@@cl@@o - \else - \edef#3{\realcolorvalue\colorcount}% - \fi} - -\def\convertCMYKtoCMY#1#2#3#4% - {\doconvertCMYKtoCMY#1\k#4\to\@@cl@@c - \doconvertCMYKtoCMY#2\k#4\to\@@cl@@m - \doconvertCMYKtoCMY#3\k#4\to\@@cl@@y - \let\@@cl@@k\@@cl@@z} - -%D Before we present the color macros, we first define the -%D setup command. This command takes care of setting up the -%D booleans that control local and global behavior (more on -%D that later) and conversion to other color spaces. - -\let\currentspotcolor \empty -\let\previousspotcolor\empty - -%D The tests depend on the use of constants. If we use the \MKIV\ -%D method we can share more. - -\def\doifcolorelse#1% - {\ifcsname\??cr\ifcsname\??cr\currentpalet#1\endcsname\currentpalet\fi#1\endcsname - \expandafter\firstoftwoarguments - \else - \expandafter\secondoftwoarguments - \fi} - -\def\doifcolor#1% - {\ifcsname\??cr\ifcsname\??cr\currentpalet#1\endcsname\currentpalet\fi#1\endcsname - \expandafter\firstofoneargument - \else - \expandafter\gobbleoneargument - \fi} - -%D There are a couple of different color switching macros, -%D the local ones can be used to speed up things (only in \MKII). - -\def\localstartcolor - {\ifincolor - \localcolortrue - \expandafter\doglobalstartcolor - \else - \expandafter\noglobalstartcolor - \fi} - -\def\localstopcolor - {\ifincolor - \doglobalstopcolor - \else - \noglobalstopcolor - \fi} - -\unexpanded\def\startcolor - {\ifincolor - \expandafter\doglobalstartcolor - \else - \expandafter\noglobalstartcolor - \fi} - -\unexpanded\def\stopcolor - {\ifincolor - \doglobalstopcolor - \else - \noglobalstopcolor - \fi} - -%D This macros call the global color switching ones. Starting -%D a global, i.e. a possible page boundary crossing, color -%D mode also sets a \type{\mark} in \TEX's internal list. 
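Editor's note: before the color stack below, a worked example of the \convertCMYKtoCMY reduction defined above: each of c, m, y gets the black component added (capped at 1) and k is dropped. Using the process color c=.1, m=.1, y=.3, k=.1 that appears in a multitone example later in this patch (editor's arithmetic):

\starttyping
% c' = min(1, .1 + .1) = 0.200
% m' = min(1, .1 + .1) = 0.200
% y' = min(1, .3 + .1) = 0.400
% k' = 0
\stoptyping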
- -\newcount\colorlevel - -\letvalue{\??cl0C}\empty % saved color -\letvalue{\??cl0S}\empty % stop command - -%D We keep a positive color stack for foreground colors, and -%D a negative one for backgrounds. Not that brilliant a -%D solution, but it suits. The signs are swapped when the -%D page ornaments are typeset. - -\let\@@colorplus \plusone -\let\@@colorminus\minusone - -\def\@@currentcolorname {\??cl\the\colorlevel C} -\def\@@currentcolorstop {\??cl\the\colorlevel S} -%def\@@currenttransparent{\??cl\the\colorlevel T} - -\letvalue{\??cl*\s!black}\s!black - -\def\currentcolorname - {\csname - \ifcsname\@@currentcolorname\endcsname - \expandafter\ifx\csname\@@currentcolorname\endcsname\empty - \??cl*\s!black - \else - \@@currentcolorname - \fi - \else - \??cl*\s!black - \fi - \endcsname} - -\def\outercolorname - {\ifcsname\@@currentcolorname\endcsname - \expandafter\ifx\csname\@@currentcolorname\endcsname\empty - \s!black - \else - currentcolor% - \fi - \else - \s!black - \fi} - -% not the following, because we need a different tag in order to trick the stack -% -% \def\outercolorname{\executeifdefined\@@currentcolorname\s!black} -% -% \def\startcurrentcolor{\expanded{\startcolor[\s!black]\noexpand\startcolor[\outercolorname]}} -% \def\stopcurrentcolor {\stopcolor\stopcolor} -% -% test case: -% -% \setupcolors[state=start,textcolor=red] -% \starttext -% red -% \color[green]{green -% \startMPcode -% label(\sometxt{green\color[blue]{blue}green}, origin) withcolor red; -% draw fullcircle scaled 1cm xscaled 2; -% \stopMPcode -% green} -% red -% \stoptext - -\def\dodoglobalstartcolor - {\global\@EA\let\@EA\@@currentcolor\csname\@@currentcolorname\endcsname - \global\advance\colorlevel \@@colorplus - \global\@EA\let\csname\@@currentcolorname\endcsname\@@askedcolor - %\debuggerinfo\m!colors - % {start \@@askedcolor\space at level \the\colorlevel}% - \ifx\@@askedcolor\empty - \global\@EA\let\csname\@@currentcolorname\endcsname\@@currentcolor - \global\@EA\let\csname\@@currentcolorstop\endcsname\donoglobalstopcolor - \else\ifx\@@askedcolor\@@currentcolor - \global\@EA\let\csname\@@currentcolorstop\endcsname\donoglobalstopcolor - \else - \doifcolorelse\@@askedcolor - {%\docolormark\@@askedcolor - \ifpermitcolormode\docolormark\@@askedcolor\fi - \global\@EA\let\csname\@@currentcolorstop\endcsname\dodoglobalstopcolor - \startcolormode\@@askedcolor} - {\global\@EA\let\csname\@@currentcolorstop\endcsname\donoglobalstopcolor - \showmessage\m!colors3\@@askedcolor\empty}% - \fi\fi} - -\def\doglobalstartcolor[#1]% - {\edef\@@askedcolor{#1}% - \ifcase\colorlevel\relax - \ifx\@@askedcolor\empty - \global\@EA\let\csname\@@currentcolorstop\endcsname\empty - \else - \dodoglobalstartcolor - \fi - \else - \dodoglobalstartcolor - \fi - \ignorespaces} - -\def\noglobalstartcolor[#1]% - {} - -\def\dodoglobalstopcolor - {\ifcase\colorlevel \else - \donoglobalstopcolor - \global\@EA\let\@EA\@@previouscolor\csname\@@currentcolorname\endcsname - \ifcase\colorlevel\relax - \ifpermitcolormode - \docolormark\empty - \conditionalstoptransparency - \dostopcolormode - \fi - \else % let's do a bit redundant testing here - \docolormark\@@previouscolor - \ifx\@@previouscolor\empty - \ifpermitcolormode - \conditionalstoptransparency - \dostopcolormode - \fi - \else - \doifcolorelse\@@previouscolor - {\ifx\@@currentcolor\@@previouscolor\else - % alternatively we could let \startcolormode handle this - \ifpermitcolormode - \conditionalstoptransparency % really needed - % more safe but less efficient: \dostopcolormode - \fi - 
\startcolormode\@@previouscolor - \fi} - {\ifpermitcolormode - \conditionalstoptransparency - \dostopcolormode - \fi}% - \fi - \fi - \fi} - -\def\donoglobalstopcolor - {\ifcase\colorlevel \else - \global\@EA\let\@EA\@@currentcolor\csname\@@currentcolorname\endcsname - %\debuggerinfo{\m!colors} - % {stop \@@currentcolor\normalspace at level \the\colorlevel}% - \global\advance\colorlevel \@@colorminus - \fi} - -\def\doglobalstopcolor - {\csname\@@currentcolorstop\endcsname} - -\let\noglobalstopcolor\relax - -\let\faststartcolor\doglobalstartcolor -\let\faststopcolor \doglobalstopcolor - -%D We don't use grouping and save each stop alternative. This -%D permits be especially useful in for instance local color -%D support in verbatim. Using \type{\bgroup}||\type{\egroup} -%D pairs could interfere with calling commands - -%D This color mechanism takes care of nested colors, like in: -%D -%D \startbuffer -%D \color[green]{groen \color[green]{groen \color[red]{rood}} groen} -%D \color[green]{groen \color[]{groen \color[red]{rood}} groen} -%D \color[green]{groen \color[red]{rood \color[red]{rood}} groen} -%D \color[green]{groen \color[green]{groen \color[]{groen}} groen} -%D \color[green]{groen \color[red]{rood} groen} -%D \color[green]{groen \color[]{groen} groen} -%D \color[]{zwart \color[red]{rood} zwart} -%D \color[]{zwart} -%D \stopbuffer -%D -%D \typebuffer -%D -%D or -%D -%D \startvoorbeeld -%D \startlines -%D \getbuffer -%D \stoplines -%D \stopvoorbeeld -%D -%D Crossing page boundaries is of course also handled. -%D Undefined or empty color specifications are treated as -%D efficient as possible. -%D -%D \startbuffer -%D \startcolor[green] -%D [green] \input tufte [green] \par -%D \startcolor[] -%D [green] \input knuth [green] \par -%D \startcolor[red] -%D [red] \input tufte [red] \par -%D \startcolor[yellow] -%D [yellow] \input knuth [yellow] \par -%D \stopcolor -%D [red] \input tufte [red] \par -%D \stopcolor -%D [green] \input knuth [green] \par -%D \stopcolor -%D [green] \input tufte [green] \par -%D \stopcolor -%D \stopbuffer -%D -%D \startpacked -%D \getbuffer -%D \stoppacked -%D -%D These quotes are typeset by saying: -%D -%D \typebuffer - -%D We already mentioned that colors interfere with building -%D the pagebody. This means that when the page is composed, -%D the colors temporary have to be reset. After the page is -%D shipped out, we have to revive the current color. -%D -%D We use \type{\mark}s to keep track of colors across page -%D boundaries. Unfortunately standard \TEX\ supports only one mark, -%D and using this one for color support only would be a waste. -%D We therefore use an adapted version of J.~Fox's multiple mark -%D mechanism as (re|)|implemented in \module{supp-mrk}. - -\doifdefinedelse{rawnewmark} - {\rawnewmark\colormark} - {\let\colormark\gobbleoneargument} - -%D Using this mark mechanism with lots of colors has one -%D major drawback: \TEX's memory tends to overflow when -%D very colorful text is stored in a global box. Even worse is that -%D the processing time grows considerably. We therefore support -%D local as well as global color switching. -%D -%D Of the next macros, \type {\popcolor} is to be used after -%D the actual \type {\shipout} and \type {\startcolorpage} and -%D \type {\stopcolorpage} are called when entering and leaving -%D the \type {\pagebody} builder. In case of emergencies -%D \type {\pushcolor} can be used to undo the current color, -%D for instance when insertions are appended to the page. 
-%D -%D Out of efficiency we only use marks when needed. The next -%D macro tries to find out if indeed a mark should be set. -%D This macro uses the boolean \type {\ifinpagebody}, which can -%D be defined and set in the module that handles the pagebody. - -\def\docolormark#1% - {\iflocalcolor \else \ifinpagebody \else \ifinframed \else - \dodocolormark{#1}% - \fi \fi \fi} - -\let\lastcolormark=\empty - -\def\dodocolormark#1% - {\edef\newcolormark{#1}% - \ifx\newcolormark\lastcolormark\else - \global\let\lastcolormark\newcolormark - \@EA\rawsetmark\@EA\colormark\@EA{\lastcolormark}% - \fi} - -%D \macros -%D {pushcolor, popcolor} -%D -%D Pushing the current state in the output routine simply comes -%D to resetting the color to black, while popping restores the -%D color state to that of before the break. - -\def\topofpagecolor{\rawgetbotmark\colormark} % see postponing - -\def\pushcolor - {\stopcolormode} - -\def\popcolor - {\doifsomething{\rawgetbotmark\colormark} - {%\debuggerinfo\m!colors{popping \getbotmark\colormark}% - \startcolormode{\rawgetbotmark\colormark}}} - -\def\popsplitcolor - {\getsplitmarks\colormark % hier wel - \doifsomething{\rawgetsplitbotmark\colormark} - {%\debuggerinfo\m!colors{split popping \getsplitbotmark\colormark}% - \startcolormode{\rawgetsplitbotmark\colormark}}} - -\appendtoks\pushcolor \to\everypushproperties -\appendtoks\popcolor \to\everypopproperties -\appendtoks\popsplitcolor\to\everypopsplitproperties - -% Private macro: only needed in test cases (like multiple -% seperations in one file); no user command! - -\def\resynccolor - {\ifcase\pagetotal % \ifdim\pagetotal=\zeropoint - \popcolor - \else\ifx\@@currentcolor\empty - \ifx\maintextcolor\empty\else - \startcolormode\maintextcolor - \fi - \else - \startcolormode\@@currentcolor - \fi\fi} - -% weird stuff - -\def\pushpostponedpagecolor - {\edef\savedtopofpagecolor{\topofpagecolor}% - \doifsomething\savedtopofpagecolor\restorecolormode} % \stopcolormode - -\def\poppostponedpagecolor - {\doifsomething\savedtopofpagecolor\startcolormode\savedtopofpagecolor} - -%D \macros -%D {startcolorpage, stopcolorpage} -%D -%D Local use can be forced with the next two macros. Nesting -%D is still supported but colors are no longer marked. -%D -%D The next implementation makes (simple) color separation more -%D easy. It also supports nested colors in page backgrounds -%D and texts. - -\def\startcolorpage - {\bgroup - \let\@@colorplus \minusone - \let\@@colorminus\plusone - \let\docolormark\gobbleoneargument - \edef\savedcolorlevel{\the\colorlevel}% - \global\colorlevel\zerocount % before \localstartcolor of - \ifx\maintextcolor\empty % course, ugly bug removed - \localstartcolor[\defaulttextcolor]% - \else - \localstartcolor[\maintextcolor]% - \fi} - -\def\stopcolorpage - {\localstopcolor - \global\colorlevel\savedcolorlevel - \egroup} - -\appendtoks \startcolorpage\to\everystarttextproperties -\prependtoks\stopcolorpage \to\everystoptextproperties - -%D We want color support to be similar to font support and -%D therefore implement \type{\color} using grouping. -%D -%D When \type {\somecolor} is issued, we can savely assume -%D grouping. Using \type {\groupedcommand} here (i.e.\ the -%D definition of \type {\color}) is unsafe because in -%D interferes with for instance switching attributes. 
- -\unexpanded\def\color[#1]% - {\groupedcommand{\startcolor[#1]}\stopcolor} - -%D This implementation enables use of defined colors like: -%D -%D \starttyping -%D Look at the {\brightgreen bright} side of life and get -%D yourself no \red{red} head! -%D \stoptyping - -%D Also wrong, test in combinations: \type{...{}{\red test}} -%D -%D \def\switchtocolor[#1]% -%D {\startcolor[#1]\aftergroup\stopcolor} - -\def\switchtocolor[#1]% grouping is realy needed, else migration - {\bgroup\startcolor[#1]\aftergroup\stopcolor\aftergroup\egroup} - -\unexpanded\def\color[#1]% - {\groupedcommand{\startcolor[#1]}\stopcolor} - -\unexpanded\def\graycolor[#1]% not \gray because this is a color - {\groupedcommand{\RGBsupportedfalse\CMYKsupportedfalse\SPOTsupportedfalse\startcolor[#1]}\stopcolor} - -%D We can speed the following macros a bit up, but this -%D hardly pays off; they are only used in the manual. - -\def\realcolorformat#1% - {\ifnum#1<\plusten 0.00\the#1\else - \ifnum#1<\plushundred 0.0\the#1\else - \ifnum#1<\plusthousand 0.\the#1\else - 1.000\fi\fi\fi} - -\def\dodoformatcolor#1% - {\colordimen#1\points\relax - \ifdim\colordimen>\onepoint - \colordimen\onepoint - \fi - \multiply\colordimen \plusthousand - \colorcount\colordimen - \advance\colorcount \medcard - \divide\colorcount \maxcard \relax - \realcolorformat\colorcount} - -\def\doformatcolorR#1:#2:#3:#4:#5\od - {\dodoformatcolor{#1}\colorformatseparator - \dodoformatcolor{#2}\colorformatseparator - \dodoformatcolor{#3}} - -\def\doformatcolorC#1:#2:#3:#4:#5:#6\od - {\dodoformatcolor{#1}\colorformatseparator - \dodoformatcolor{#2}\colorformatseparator - \dodoformatcolor{#3}\colorformatseparator - \dodoformatcolor{#4}} - -\def\doformatcolorS#1:#2:#3\od - {\dodoformatcolor{#1}} - -\def\doformatcolorP#1:#2:#3:#4:#5:#6\od - {#1\colorformatseparator - \dodoformatcolor{#2}\colorformatseparator - \dodoformatcolor{#3}\colorformatseparator - \dodoformatcolor{#4}\colorformatseparator} - -\def\doformatcolor#1:% - {\csname doformatcolor#1\endcsname} - -\def\colorvalue - {\dowithcolor\doformatcolor} - -\def\doformatgrayR#1:#2:#3:#4:#5\od - {\convertRGBtoGRAY{#1}{#2}{#3}% - \dodoformatcolor\@@cl@@s} - -\def\doformatgrayC#1:#2:#3:#4:#5:#6\od - {\convertCMYKtoGRAY{#1}{#2}{#3}{#4}% - \dodoformatcolor\@@cl@@s} - -\def\doformatgrayS#1:#2:#3\od - {\dodoformatcolor{#1}} - -% \def\doformatgrayP#1:#2:#3:#4:#5:#6\od -% {\convertSPOTtoGRAY{#1}{#2}{#3}{#4}% -% \dodoformatcolor\@@cl@@s} - -\def\doformatgrayP#1:#2:#3:#4:#5:#6\od - {todo} - -\def\doformatgray#1:% - {\csname doformatgray#1\endcsname} - -\def\grayvalue - {\dowithcolor\doformatgray} - -%D \macros -%D {localstartraster,localstopraster, -%D startraster,stopraster} -%D -%D The previous conversions are not linear and treat each color -%D component according to human perception curves. Pure gray -%D (we call them rasters) has equal color components. In -%D \CONTEXT\ rasters are only used as backgrounds and these -%D don't cross page boundaries in the way color does. Therefore -%D we don't need stacks and marks. Just to be compatible with -%D color support we offer both 'global' and 'local' commands. -%D -%D \starttyping -%D \def\localstartraster[#1]% -%D {\doifelsenothing{#1} -%D {\dostartgraymode\@@rsscreen} -%D {\dostartgraymode{#1}}} -%D -%D \def\localstopraster -%D {\dostopgraymode} -%D -%D \let\startraster\localstartraster -%D \let\stopraster \localstopraster -%D \stoptyping -%D -%D The next alternative is slower, since it works on top of the -%D color (stack) mechanism, but it does provide nesting. 
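Editor's note: as a usage sketch for the nesting raster alternative defined just below; the screen values are arbitrary examples:

\starttyping
\startraster[.8]
  light gray text
  \startraster[.6]a darker nested fragment\stopraster
  back to the outer raster value
\stopraster
\stoptyping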
- -\def\dosetrastercolor#1% - {\edef\@@cl@@s{#1}% - \ifx\@@cl@@s\empty - \let\@@cl@@s\@@rsscreen - \fi - \let\@@cl@@t\@@cl@@z % else we get rogue - \let\@@cl@@a\@@cl@@z % transpancies - \setevalue{\??cr\??rs}{\colorSpattern}} - -% beware, don't add extra grouping, else color in tables -% fails - -\def\localstartraster[#1]% - {\ifincolor\dosetrastercolor{#1}\localstartcolor[\??rs]\fi} - -\def\startraster[#1]% - {\ifincolor\dosetrastercolor{#1}\startcolor[\??rs]\fi} - -\def\localstopraster{\ifincolor\localstopcolor\fi} -\def\stopraster {\ifincolor\stopcolor\fi} - -\def\raster[#1]{\groupedcommand{\startraster[#1]}{\stopraster}} - -%D Palets use an auxiliary macro: - -\def\dodefinepaletcolor#1#2#3% - {\doifassignmentelse{#3} - {% == \definepalet[test][xx={y=.4}] - \definecolor[\??pa#1:#2][#3]% - \iffreezecolors\@EA\setevalue\else\@EA\setvalue\fi{\??cr#1:#2}{\csname\??cr\??pa#1:#2\endcsname}} - {% == \definepalet[test][xx=green] - \doifdefinedelse{\??cr#3} - {\iffreezecolors\@EA\setevalue\else\@EA\setvalue\fi{\??cr#1:#2}{\csname\??cr#3\endcsname}} - {\letvalue{\??cr#1:#2}\colorXpattern}}} - -%D \MP\ related conversions: - -\def\scaledMPcolor#1#2% - {\ifMPgraphics - \handlecolorwith\doMPcolor - \csname\??cr - \ifcsname\??cr\currentpalet#2\endcsname\currentpalet\fi - #2\endcsname - :::::::\end#1\end - \else - #2% - \fi} - -\def\MPcolor{\scaledMPcolor1} - -%D Before we had transparency available, the following -%D conversion macro was available: -%D -%D \starttyping -%D \def\doMPcolor#1:#2:#3:#4:#5:#6:#7:#8\end -%D {\if #1R(#2,#3,#4)% -%D \else\if#1C\ifMPcmykcolors cmyk(#2,#3,#4,#5)\else(1-#2-#5,1-#3-#5,1-#4-#5)\fi -%D \else\if#1S(#2,#2,#2)% -%D \else (0,0,0)% -%D \fi\fi\fi} -%D \stoptyping -%D -%D In order to be useful, this macro is to be fully -%D expandabele. - -\def\doMPcolor#1:% #1 can be \relax ! ! ! i.e. 
an empty color - {\csname MPc\@EA\ifx\csname MPc\string#1\endcsname\relax B\else#1\fi\endcsname} - -\def\MPcR{\doMPrgb} -\def\MPcC{\ifMPcmykcolors\@EA\doMPcmykY\else\@EA\doMPcmykN\fi} -\def\MPcS{\doMPgray} -\def\MPcP{\ifMPspotcolors\@EA\doMPspotY\else\@EA\doMPspotN\fi} -\def\MPcB{\doMPblack} - -\def\transparentMP {transparent} -\def\cmykMP {scaledcmyk} -\def\cmykASrgbMP {scaledcmykasrgb} % not really needed any more -\def\rgbMP {scaledrgb} -\def\grayMP {scaledgray} -\def\spotMP {spotcolor} - -\def\doMPtransparent#1#2:#3:#4\end - {\ifcase#2\space(#1)\else\transparentMP(#2,#3,(#1))\fi} - -\def\doMPgray#1:#2\end#3\end - {\doMPtransparent{\grayMP(#1,#3)}#2\end} - -\def\doMPrgb#1:#2:#3:#4\end#5\end - {\doMPtransparent{\rgbMP(#1,#2,#3,#5)}#4\end} - -\def\doMPcmykY#1:#2:#3:#4:#5\end#6\end - {\doMPtransparent{\cmykMP(#1,#2,#3,#4,#6)}#5\end} - -\def\doMPcmykN#1:#2:#3:#4:#5\end#6\end - {\doMPtransparent{\cmykASrgbMP(#1,#2,#3,#4,#6)}#5\end} - -\def\doMPspotY#1:#2:#3:#4:#5\end#6\end % best make #3 same as #1 when empty - {\doMPtransparent{multitonecolor("#1",#2,"#3","#4")}#5\end} - -\def\doMPspotN#1:#2:#3:#4:#5\end#6\end - {\scaledMPcolor{#4}{#1}} - -\def\doMPblack#1\end#2\end - {\unknownMPcolor} - -\def\unknownMPcolor - {(0,0,0)} - -\let\processMP\spotMP % for some time, will become obsolete, brrr - -%D \PDF\ related conversions: - -\def\PDFcolor #1{\handlecolorwith\doPDFcolor \csname\??cr#1\endcsname:::::::\end} -\def\PDFcolorvalue#1{\handlecolorwith\doPDFcolorvalue\csname\??cr#1\endcsname:::::::\end} -\def\FDFcolor #1{\handlecolorwith\doFDFcolor \csname\??cr#1\endcsname:::::::\end} - -\def\doPDFcolor#1:#2:#3:#4:#5:#6:#7:#8\end - {\if #1R#2 #3 #4 rg% - \else\if#1C#2 #3 #4 #5 k% - \else\if#1S#2 g% - \else\if#1P#5 g% - \else 0 g% - \fi\fi\fi\fi} - -\def\doPDFcolorvalue#1:#2:#3:#4:#5:#6:#7:#8\end - {\if #1R#2 #3 #4% - \else\if#1C#2 #3 #4 #5% - \else\if#1S#2% - \else\if#1P#5% - \else 0% - \fi\fi\fi\fi} - -\def\doFDFcolor#1:#2:#3:#4:#5:#6:#7:#8\end - {[\if #1R#2 #3 #4% - \else\if#1C#2 #3 #4 #5% - \else\if#1S#2% - \else\if#1P#5% - \else 0% - \fi\fi\fi\fi]} - -\def\internalspotcolorname#1{\handlecolorwith\dointernalspotcolorname\csname\??cr#1\endcsname:::::::\end} -\def\internalspotcolorsize#1{\handlecolorwith\dointernalspotcolorsize\csname\??cr#1\endcsname:::::::\end} - -\def\dointernalspotcolorname#1:#2:#3:#4:#5:#6:#7:#8\end{\if#1P\ifcase0#3 #1\else#2\fi\else#1\fi} -\def\dointernalspotcolorsize#1:#2:#3:#4:#5:#6:#7:#8\end{\if#1P\ifcase0#3 0\else#3\fi\else 0\fi} - -%D Slow but ok \unknown - -\def\colorcomponents#1% - {\startnointerference - \localcolortrue - \def\doexeccolorR ##1:##2:##3:##4:##5\od{\gdef\thecolorcomponents{r=\twodigitrounding{##1} g=\twodigitrounding{##2} b=\twodigitrounding{##3}}}% - \def\doexeccolorC##1:##2:##3:##4:##5:##6\od{\gdef\thecolorcomponents{c=\twodigitrounding{##1} m=\twodigitrounding{##2} y=\twodigitrounding{##3} k=\twodigitrounding{##4}}}% - \def\doexeccolorS ##1:##2:##3\od{\gdef\thecolorcomponents{s=\twodigitrounding{##1}}}% - \def\doexeccolorP##1:##2:##3:##4:##5:##6\od{\gdef\thecolorcomponents{p=\twodigitrounding{##4} n=##1}}% - \let\doexeccolorPindex\doexeccolorP - \backgroundline[#1]{}% - \stopnointerference - \thecolorcomponents} - -\def\transparencycomponents#1% - {\startnointerference - \localcolortrue - \def\doexeccolorR ##1:##2:##3:##4:##5\od{\gdef\thetransparencycomponents{a=\twodigitrounding{##4} t=\twodigitrounding{##5}}}% - \def\doexeccolorC##1:##2:##3:##4:##5:##6\od{\gdef\thetransparencycomponents{a=\twodigitrounding{##5} t=\twodigitrounding{##6}}}% - 
\def\doexeccolorS ##1:##2:##3\od{\gdef\thetransparencycomponents{a=\twodigitrounding{##2} t=\twodigitrounding{##3}}}% - \def\doexeccolorP##1:##2:##3:##4:##5:##6\od{\gdef\thetransparencycomponents{a=\twodigitrounding{##5} t=\twodigitrounding{##6}}}% - \let\doexeccolorPindex\doexeccolorP - \backgroundline[#1]{}% - \stopnointerference - \thecolorcomponents} - -%D \macros -%D {everyshapebox} -%D -%D A terrible hack, needed because we cannot have marks in -%D shape boxes. - -\appendtoks \localcolortrue \to \everyshapebox - -\protect \endinput diff --git a/Master/texmf-dist/tex/context/base/colo-new.mkiv b/Master/texmf-dist/tex/context/base/colo-new.mkiv deleted file mode 100644 index d054095fe97..00000000000 --- a/Master/texmf-dist/tex/context/base/colo-new.mkiv +++ /dev/null @@ -1,331 +0,0 @@ -%D \module -%D [ file=colo-ini, -%D version=2007.08.08, -%D title=\CONTEXT\ Color Macros, -%D subtitle=Initialization, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA / Hans Hagen \& Ton Otten}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\unprotect - -\chardef\colorversion=2 - -% todo: palets in definecolor -% todo: {\red xx} vs \red{xx} - -% check: registerusedspotcolors -% check: registerusedcolorchannels -% check: \currentcolorname -% check: \outercolorname -% check: \chardef\currentcolorchannel=0 -% check: \startcolormode -% check: \newif\iffilterspotcolor \filterspotcolorfalse -% check: \newif\ifdoingspotcolor \doingspotcolorfalse -% check: \registercolorchannel - -% \def\mptexcolor#1{"\dogetattributeid\s!color \somecolorattribute{#1} A"} -% -% \startMPpage -% fill fullcircle scaled 10cm ; -% fill fullcircle scaled 5cm withprescript \mptexcolor{red} withpostscript \mptexcolor{black} ; -% fill fullcircle scaled 3cm ; -% draw btex test etex withprescript \mptexcolor{blue} ; -% \stopMPpage - -\registerctxluafile{colo-new}{1.000} - -\ifx\currentcolormodel\undefined \newcount\currentcolormodel \fi - -\def\setcolormodel#1% - {\currentcolormodel\ctxlua{tex.print(colors.setmodel('colormodel','#1'))}% - \dosetattribute{colormodel}{\the\currentcolormodel}} - -\setcolormodel{all} - -\def\dosetupcolormodel - {\ifincolor - \ifRGBsupported - \ifCMYKsupported - \setcolormodel{all}% - \else - \setcolormodel{rgb}% - \fi - \else - \ifCMYKsupported - \setcolormodel{cmyk}% - \else - \setcolormodel{gray}% - \fi - \fi - \else - \setcolormodel{gray}% - \fi} - -\appendtoks - \dosetupcolormodel -\to \everyjob - -% Currently in mkiv transparency is implemented independent of color. This costs -% a bit more processing time but gives the possibility to apply transparency -% independently in the future. Is this useful? If not we may as well combine them -% again in the future. By coupling we are downward compatible. When we decouple we -% need to do more tricky housekeeping (e.g. persist color independent transparencies -% when color bound ones are nil. - -% Since we couple definitions, we could stick to one test. Todo. Same for mpcolor. 
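Editor's note: the \dosetupcolormodel branch above derives the MkIV color model from the rgb/cmyk support switches that \setupcolors maintains. A hedged sketch of the mapping; the trailing comments are the editor's reading of the code, not traced output:

\starttyping
\setupcolors[state=start]       % rgb and cmyk supported -> model 'all'
\setupcolors[cmyk=no]           % rgb only               -> model 'rgb'
\setupcolors[rgb=no,cmyk=yes]   % cmyk only              -> model 'cmyk'
\setupcolors[rgb=no,cmyk=no]    % neither                -> model 'gray'
\stoptyping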
- -% \def\doactivatecolor#1% : in currentpalet, maybe not, ugly -% {\ifcsname(cs:\currentpalet#1)\endcsname -% \csname(cs:\currentpalet#1)\endcsname -% \csname(ts:\currentpalet#1)\endcsname -% \else -% \csname(cs:#1)\endcsname -% \csname(ts:#1)\endcsname -% \fi} - -% \def\doactivatecolor#1% : in currentpalet, maybe not, ugly -% {\csname(cs:\ifcsname(cs:\currentpalet#1)\endcsname\currentpalet\fi#1)\endcsname} -% \csname(ts:\ifcsname(ts:\currentpalet#1)\endcsname\currentpalet\fi#1)\endcsname} -% -% more robust test, else we get \relaxed non-colors which may confuse e.g. mpcolor - -\letvalue{(cs:-}\empty -\letvalue{(ts:-}\empty - -% \def\doactivatecolor#1% : in currentpalet, maybe not, ugly -% {\csname(cs:\ifcsname(cs:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(cs:#1)\endcsname#1\else-\fi\fi)\endcsname -% \csname(ts:\ifcsname(ts:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ts:#1)\endcsname#1\else-\fi\fi)\endcsname} - -\def\doactivatecolor#1% : in currentpalet, maybe not, ugly - {\ifcsname(cs:\currentpalet#1)\endcsname - \csname(cs:\currentpalet#1)\endcsname - \csname(ts:\currentpalet#1)\endcsname - \else\ifcsname(cs:#1)\endcsname - \csname(cs:#1)\endcsname - \csname(ts:#1)\endcsname - \fi\fi} - -\let\normaldoactivatecolor\doactivatecolor - -% if it becomes a bottleneck we can set up a more complex system with one shared -% attribute for colorspace, color and transparency - -\def\doactivatecolor - {\ifproductionrun - \ctxlua{colors.enabled=true transparencies.enabled=true}% not that efficient but at least robust - \let\doactivatecolor\normaldoactivatecolor - \expandafter\doactivatecolor - \else - \expandafter\normaldoactivatecolor - \fi} - -\def\deactivatecolor - {\doresetattribute\s!color - \doresetattribute\s!transparency} - -\def\dodefinecolorcommand#1#2% - {\unexpanded#1{#2}{\doactivatecolor{#2}}} - -\def\dodefinecolor[#1][#2]% - {\addtocommalist{#1}\colorlist - \ctxlua{ctx.defineprocesscolor("#1","#2",false,\iffreezecolors true\else false\fi)}% - \dodefinecolorcommand\setvalue{#1}} - -\def\dodefineglobalcolor[#1][#2]% - {\doglobal\addtocommalist{#1}\colorlist - \ctxlua{ctx.defineprocesscolor("#1","#2",true,\iffreezecolors true\else false\fi)}% - \dodefinecolorcommand\setgvalue{#1}} - -\def\dodefinenamedcolor[#1][#2]% - {\doglobal\addtocommalist{#1}\colorlist - \ctxlua{ctx.defineprocesscolor("#1","#2",false,\iffreezecolors true\else false\fi)}% - \dodefinecolorcommand\setvalue{#1}} - -\def\dodefinespotcolor[#1][#2][#3]% - {\doglobal\addtocommalist{#1}\colorlist % optional - \doglobal\addtocommalist{#2}\allspotcolors - \ctxlua{ctx.definespotcolor("#1","#2","#3",true)}% - \dodefinecolorcommand\setxvalue{#1}} - -\def\dodefinemultitonecolor[#1][#2][#3][#4]% - {\ctxlua{ctx.definemultitonecolor("#1","#2","#3","#4",true)}% - \dodefinecolorcommand\setxvalue{#1}} - -\def\dodefinetransparency[#1][#2]% - {\ctxlua{ctx.definetransparency("#1",#2)}} - -\def\dosetrastercolor#1% slow, we need a fast one - {\edef\@@rastervalue{#1}% - \ifx\@@rastervalue\empty - \let\@@rastervalue\@@rsscreen - \fi - \dosetattribute\s!color{\ctxlua{tex.sprint(ctx.definesimplegray("_raster_",\@@rastervalue))}}} - -\def\doifcolorelse#1% - {\ifcsname(ca:\currentpalet#1)\endcsname - \@EA\firstoftwoarguments - \else\ifcsname(ca:#1)\endcsname - \@EAEAEA\firstoftwoarguments - \else - \@EAEAEA\secondoftwoarguments - \fi\fi} - -\def\doifcolor#1% - {\ifcsname(ca:\currentpalet#1)\endcsname - \@EA\firstofoneargument - \else\ifcsname(ca:#1)\endcsname - \@EAEAEA\firstofoneargument - \else - \@EAEAEA\gobbleoneargument 
- \fi\fi} - -% currentcolor, then we can push pop in register - - \def\switchtocolor [#1]{\getvalue{#1}} -\unexpanded\def\color [#1]{\groupedcommand{\doactivatecolor{#1}}{}} -\unexpanded\def\startcolor [#1]{\begingroup\doactivatecolor{#1}} -\unexpanded\def\stopcolor {\endgroup} -\unexpanded\def\graycolor [#1]{\groupedcommand{\setcolormodel{gray}\getvalue{#1}}{}} - \def\predefinecolor [#1]{\flushatshipout{\hbox{\color[#1]{}}}} - \def\predefineindexcolor[#1]{\flushatshipout{\hbox{\color[#1]{}}}} - \def\startcolorpage {\startcolor[\ifx\maintextcolor\empty\defaulttextcolor\else\maintextcolor\fi]} - \def\stopcolorpage {\stopcolor} - \def\localstartraster [#1]{\dosetrastercolor{#1}} - \def\localstopraster {} - \def\startraster [#1]{\dosetrastercolor{#1}} - \def\stopraster {} - \def\raster [#1]{\groupedcommand{\dosetrastercolor{#1}}{}} - \def\faststartcolor [#1]{\doactivatecolor{#1}} - \def\faststopcolor {} - -\def\startregistercolor[#1]% - {\doifelsenothing{#1} - {\let\stopregistercolor\relax} - {\edef\stopregistercolor - {\dosetattribute\s!color {\dogetattribute\s!color }% - \dosetattribute\s!transparency{\dogetattribute\s!transparency}}% - \doactivatecolor{#1}}} - -\let\resynccolor \relax % ? -\let\pushcolor \relax -\let\popcolor \relax -\let\popsplitcolor\relax - -\def\restorecolormode - {\ifincolor - \deactivatecolor - \ifx\maintextcolor\empty \else - \doactivatecolor\maintextcolor - \fi - \fi} - -\def\pushpostponedpagecolor - {\edef\savedtopofpagecolor{\topofpagecolor}% - \doifsomething\savedtopofpagecolor\restorecolormode} - -\def\poppostponedpagecolor - {\doifsomething\savedtopofpagecolor\doactivatecolor\savedtopofpagecolor} - -% \def\pushcolor -% {\edef\popcolor -% {\dosetattribute\s!color {\dogetattribute\s!color }% -% \dosetattribute\s!transparency{\dogetattribute\s!transparency}}% -% \let\popsplitcolor\popcolor -% \deactivatecolor} - -\appendtoks\deactivatecolor\to\everybeforeoutput % maybe we don't need push pop now - -\def\startregistercolor[#1]% - {\doifelsenothing{#1} - {\let\stopregistercolor\relax} - {\edef\stopregistercolor - {\dosetattribute\s!color {\dogetattribute\s!color }% - \dosetattribute\s!transparency{\dogetattribute\s!transparency}}% - \doactivatecolor{#1}}} - -\let\grey \graycolor -\let\localstartcolor \startcolor -\let\localstopcolor \stopcolor -\let\globalstartcolor\startcolor -\let\globalstopcolor \stopcolor - -\def\registermaintextcolor{\ctxlua{colors.main = \thecolorattribute\maintextcolor}} - -\def\starttextcolor[#1]% - {\doifsomething{#1} - {\definecolor[\@@themaintextcolor][#1]% - \let\maintextcolor\@@themaintextcolor - \doactivatecolor\maintextcolor - \registermaintextcolor}} - -\let\stoptextcolor\relax - -\def\initializemaintextcolor - {\doifelsenothing\@@cltextcolor - {\definecolor[\@@themaintextcolor][\defaulttextcolor]} - {\definecolor[\@@themaintextcolor][\@@cltextcolor]}% - \let\maintextcolor\@@themaintextcolor - \doactivatecolor\maintextcolor - \registermaintextcolor} - -\appendtoks \initializemaintextcolor \to \everyjob - -\def\localstarttextcolor{\expanded{\startcolor[\ifx\maintextcolor\empty\defaulttextcolor\else\maintextcolor\fi]}} -\let\localstoptextcolor \stopcolor -\let\restoretextcolor \firstofoneargument - -\def\dodefinepaletcolor#1#2#3% - {\doifassignmentelse{#3}% \definepalet[test][xx={y=.4}] - {\definecolor[\??pa#1:#2][#3]% - \iffreezecolors\@EA\setevalue\else\@EA\setvalue\fi{(cs:#1:#2)}{\csname(cs:\??pa#1:#2)\endcsname}% - \iffreezecolors\@EA\setevalue\else\@EA\setvalue\fi{(ca:#1:#2)}{\csname(ca:\??pa#1:#2)\endcsname}} - 
{\doifdefinedelse{(cs:#3)}% \definepalet[test][xx=green] - {\iffreezecolors\@EA\setevalue\else\@EA\setvalue\fi{(cs:#1:#2)}{\csname(cs:#3)\endcsname}% - \iffreezecolors\@EA\setevalue\else\@EA\setvalue\fi{(ca:#1:#2)}{\csname(ca:#3)\endcsname}} - {\letvalue{(cs:#1:#2)}\undefined - \letvalue{(ca:#1:#2)}\undefined}}} - -\setvalue{(cs:)}{} \setvalue{(ca:)}{0} -\setvalue{(ts:)}{} \setvalue{(ta:)}{0} - -\def\doinheritca#1{\csname(ca:\ifcsname(ca:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ca:#1)\endcsname#1\fi\fi)\endcsname} -\def\doinheritcs#1{\csname(cs:\ifcsname(cs:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(cs:#1)\endcsname#1\fi\fi)\endcsname} -\def\doinheritta#1{\csname(ta:\ifcsname(ta:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ta:#1)\endcsname#1\fi\fi)\endcsname} -\def\doinheritts#1{\csname(ts:\ifcsname(ts:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ts:#1)\endcsname#1\fi\fi)\endcsname} - -\def\MPcolor#1{\ctxlua{tex.sprint(ctx.mpcolor(\number\currentcolormodel,\number\doinheritca{#1},\number\doinheritta{#1}))}} - -\def\currentcolorname{\s!black} % todo -\def\outercolorname {\s!black} % todo - -\def\thecolorattribute #1{\number\csname(ca:\ifcsname(ca:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ca:#1)\endcsname#1\fi\fi)\endcsname} -\def\thetransparencyattribute#1{\number\csname(ta:\ifcsname(ta:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ta:#1)\endcsname#1\fi\fi)\endcsname} - -\def\PDFcolor #1{\ctxlua{tex.sprint(ctx.pdfcolor (\number\currentcolormodel, \thecolorattribute{#1}))}} -\def\PDFcolorvalue#1{\ctxlua{tex.sprint(ctx.pdfcolorvalue(\number\currentcolormodel, \thecolorattribute{#1}))}} -\def\FDFcolor #1{\ctxlua{tex.sprint(ctx.fdfcolor (\number\currentcolormodel, \thecolorattribute{#1}))}} - -\def\internalspotcolorname#1{\ctxlua{tex.sprint(ctx.spotcolorname (\thecolorattribute{#1}))}} -\def\internalspotcolorsize#1{\ctxlua{tex.sprint(ctx.spotcolorvalue(\thecolorattribute{#1}))}} - -\def\colorcomponents #1{\ctxlua{tex.sprint(ctx.colorcomponents (\thecolorattribute {#1}))}} -\def\transparencycomponents#1{\ctxlua{tex.sprint(ctx.transparencycomponents(\thetransparencyattribute{#1}))}} - -\def\colorvalue#1{\ctxlua{tex.sprint(ctx.formatcolor(\thecolorattribute{#1},"\colorformatseparator"))}} -\def\grayvalue #1{\ctxlua{tex.sprint(ctx.formatgray (\thecolorattribute{#1},"\colorformatseparator"))}} - -% hack, till we have adapted backend: (move it there) - -\def\presetPDFtransparencybynumber#1#2#3% - {\initializetransparency - \ifcase#1\else - \global\PDFcurrenttransparency\numexpr#1+\minusone\relax - \presetPDFtransparency{#2}{#3}% - \fi} - -\protect \endinput diff --git a/Master/texmf-dist/tex/context/base/colo-new.tex b/Master/texmf-dist/tex/context/base/colo-new.tex deleted file mode 100644 index 04b2ef716f7..00000000000 --- a/Master/texmf-dist/tex/context/base/colo-new.tex +++ /dev/null @@ -1,1051 +0,0 @@ -%D \module -%D [ file=colo-ini, -%D version=2007.08.08, -%D title=\CONTEXT\ Color Macros, -%D subtitle=Initialization, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA / Hans Hagen \& Ton Otten}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -%D We need to clean this up further but first we hav eto make sure that mkiv -%D code works ok. - -\writestatus{loading}{Context Color Macros / initialization} - -%D This module implements color. 
Since \MKII\ and \MKIV\ use a completely -%D different approach, this module only implements a few generic mechanisms. - -\startmessages dutch library: colors - title: kleur - 1: systeem -- is globaal actief - 2: systeem -- is lokaal actief - 3: -- is niet gedefinieerd -- - 4: systeem -- wordt geladen - 5: onbekend systeem -- - 6: palet -- is beschikbaar - 7: palet -- is niet beschikbaar - 8: specificatie -- bij -- wordt zwart - 9: -- kleurruimte wordt niet ondersteund - 10: -- kleurruimte wordt ondersteund - 11: kleur wordt vertaald in grijs - 12: -- is geregistreerd -\stopmessages - -\startmessages english library: colors - title: color - 1: system -- is global activated - 2: system -- is local activated - 3: -- is not defined -- - 4: system -- is loaded - 5: unknown system -- - 6: palette -- is available - 7: palette -- is not available - 8: specification -- at color -- becomes black - 9: -- color space is not supported - 10: -- color space is supported - 11: color is converted to gray - 12: -- is registered -\stopmessages - -\startmessages german library: colors - title: farbe - 1: system -- ist global aktiviert - 2: system -- ist lokal aktiviert - 3: -- ist undefiniert -- - 4: system -- ist geladen - 5: unbekanntes System -- - 6: palette -- ist verfuegbar - 7: palette -- ist nicht verfuegbar - 8: Spezifikation -- bei Farbe -- wird schwarz - 9: -- Farbraum wird nicht unterstuetzt - 10: -- Farbraum wird unterstuetzt - 11: Farbe wird in Grau umgewandelt - 12: -- is registered -\stopmessages - -\startmessages czech library: colors - title: barva - 1: system -- je globalne aktivovana - 2: system -- je lokalne activovana - 3: -- neni definovana -- - 4: system -- je nacten - 5: neznamy system -- - 6: palette -- je k dispozici - 7: palette -- neni k dispozici - 8: specifikace -- v barve -- bude cerna - 9: -- prostor barev neni podporovan - 10: -- prostor barev je podporovan - 11: barva je prevedena na sed - 12: -- is registered -\stopmessages - -\startmessages italian library: colors - title: colore - 1: sistema -- attivato globalmente - 2: sistema -- attivato localmente - 3: -- non definito -- - 4: sistema -- caricato - 5: sistema -- sconosciuto - 6: tavolozza -- resa disponibile - 7: tavolozza -- non disponibile - 8: specifica -- del colore -- convertita in nero - 9: spazio dei colori -- non supportato - 10: spazio dei colori -- supportato - 11: il colore ø convertito in grigio - 12: -- is registered -\stopmessages - -\startmessages norwegian library: colors - title: farge - 1: system -- er aktivert globalt - 2: system -- er aktivert lokalt - 3: -- er udefinert -- - 4: system -- er lest inn - 5: ukjent system -- - 6: palett -- er tilgjengelig - 7: palett -- er ikke tilgjengelig - 8: spesifikasjon -- for farge -- gir kun svart - 9: -- fargerom er ikke støttet - 10: -- fargerom er støttet - 11: fargen vil bli vist som grø - 12: -- is registered -\stopmessages - -\startmessages romanian library: colors - title: culori - 1: sistem -- este activata global - 2: sistem -- este activata local - 3: -- nu este definita -- - 4: sistem -- este incarcata - 5: sistem -- necunoscuta - 6: paleta -- este disponibila - 7: palette -- nu este disponibila - 8: specificatia -- la culoarea -- devine neagra - 9: spatiul de culoare -- nu este suportat - 10: spatiul de culoare -- este suportat - 11: culoarea este convertita la gri - 12: -- is registered -\stopmessages - -\startmessages french library: colors - title: couleurs - 1: le système -- est globalement activé - 2: le système -- est localement activé - 3: -- 
n'est pas défini -- - 4: le système -- est chargé - 5: système -- inconnu - 6: la palette -- est disponible - 7: le palette -- n'est pas disponible - 8: la spécification -- de la couleur -- devient noire - 9: l'espace de couleur -- n'est pas supporté - 10: -- l'espace de couleur est supporté - 11: la couleur est convertie en niveau de gris - 12: -- est enregistré -\stopmessages - -\unprotect - -\chardef\colorversion=1 % temp, needed for tracing purposes, mkiv transition - -%D We use a couple of local registers. That way we don't have -%D to group when converting colors. By the way, this is not -%D really faster. We can sqeeze half a second runtime for 50K -%D switches on a 1G machine, but the macros will become rather -%D ugly then. To mention one such improvement: no colon -%D after the key character (.25 sec). - -\newdimen\colordimen -\newcount\colorcount - -%D When typesetting for paper, we prefer using the \cap{CMYK} -%D color space, but for on||screen viewing we prefer \cap{RGB} -%D (the previous implementation supported only this scheme). -%D Independant of such specifications, we support some automatic -%D conversions: -%D -%D \startitemize[packed] -%D \item convert all colors to \cap{RGB} -%D \item convert all colors to \cap{CMYK} -%D \item convert all colors to gray scales -%D \stopitemize -%D -%D We also support optimization of colors to gray scales. -%D -%D \startitemize[continue] -%D \item reduce gray colors to gray scales -%D \item reduce \cap{CMY} components to \cap{K} -%D \stopitemize -%D -%D These options are communicated by means of: - -\newif\ifRGBsupported -\newif\ifCMYKsupported -\newif\ifSPOTsupported -\newif\ifpreferGRAY -\newif\ifGRAYprefered -\newif\ifreduceCMYK -\newif\ifconverttoGRAY -\newif\ifweightGRAY \weightGRAYtrue - -\newif\ifconvertMPcolors -\newif\ifreduceMPcolors -\newif\ifforcegrayMPcolors - -%D The last boolean controls reduction of \cap{CMYK} to -%D \cap{CMY} colors. When set to true, the black component -%D is added to the other ones. -%D -%D Prefering gray is not the same as converting to gray. -%D Conversion treats each color components in a different way, -%D while prefering is just a reduction and thus a -%D space||saving option. 
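Editor's note: a hedged sketch of how that distinction surfaces in the setup interface handled later in this file; the comments are the editor's interpretation of the conversion key, not documented behavior:

\starttyping
\setupcolors[conversion=yes]    % prefer gray: colors with equal
                                % components collapse to one gray value
\setupcolors[conversion=always] % convert: rgb/cmyk support is dropped
                                % and colors are weighted to gray
\stoptyping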
- -\newif\iffreezecolors \freezecolorsfalse -\newif\ifincolor % true if colors enabled -\newif\iflocalcolor - -\let\colorlist \empty -\let\currentspotcolor \empty -\let\allspotcolors \empty -\let\usedspotcolors \empty -\let\usedcolorchannels\empty -\let\currentpalet \empty - -%D \macros -%D {definecolor,defineglobalcolor,definenamedcolor,definespotcolor,definemultitonecolor} -%D -%D \startbuffer -%D \definecolor [blue] [c=1,m=.38,y=0,k=.64] % pantone pms 2965 uncoated m -%D \definecolor [yellow] [c=0,m=.28,y=1,k=.06] % pantone pms 124 uncoated m -%D -%D \definespotcolor [blue-100] [blue] [p=1] -%D \definespotcolor [yellow-100] [yellow] [p=1] -%D -%D \definemultitonecolor [pdftoolscolor] [blue=.12,yellow=.28] [c=.1,m=.1,y=.3,k=.1] -%D -%D \useexternalfigure[demofig][mill.png][object=no] -%D -%D \startcombination[4*1] -%D {\externalfigure[demofig]} {no color} -%D {\externalfigure[demofig][color=pdftoolscolor]} {indexed duotone} -%D {\externalfigure[demofig][color=blue-100]} {spot color} -%D {\externalfigure[demofig][color=yellow-100]} {spot color} -%D \stopcombination -%D \stopbuffer -%D -%D \getbuffer \typebuffer - -\def\definecolor {\dodoubleargument\dodefinecolor} -\def\defineglobalcolor {\dodoubleargument\dodefineglobalcolor} -\def\definenamedcolor {\dodoubleargument\dodefinenamedcolor} -\def\definespotcolor {\dotripleargument\dodefinespotcolor} -\def\definemultitonecolor{\doquadrupleempty\dodefinemultitonecolor} - -% check: registerusedspotcolors -% check: registerusedcolorchannels - -%D \macros -%D {doifcolorelse, doifcolor} -%D -%D Switching to a color is done by means of the following -%D command. Later on we will explain the use of palets. We -%D define ourselves a color conditional first. - -\ifx\doifcolorelse\undefined - \let\doifcolorelse\secondoftwoarguments - \let\doifcolor \gobbleoneargument -\fi - -%D \macros -%D {localstartcolor,localstopcolor} -%D -%D Simple color support, that is without nesting, is provided -%D by: - -\ifx\localstartcolor\undefined - \let\localstartcolor\undefined - \let\localstopcolor \undefined -\fi - -%D \macros -%D {faststartcolor,faststopcolor} -%D -%D No checking for arguments and such: - -\ifx\faststartcolor\undefined - \def\faststartcolor[#1]{} - \def\faststopcolor {} -\fi - -%D These local ones may go away in future versions. - -%D \macros -%D {startcolor,stopcolor} -%D -%D The more save method, the one that saves the current color -%D state and returns to this state afterward, is activated by: -%D -%D \showsetup{startcolor} - -\ifx\startcolor\undefined - \let\startcolor\undefined - \let\stopcolor \undefined -\fi - -%D \macros -%D {startcurrentcolor,stopcurrentcolor} - -\def\startcurrentcolor{\startcolor[\outercolorname]} -\def\stopcurrentcolor {\stopcolor} - -%D \macros -%D {color,graycolor} -%D -%D This leaves the simple color command: -%D -%D \showsetup{color} -%D \showsetup{graycolor} - -\ifx\color\undefined - \def\color [#1]{} - \def\graycolor[#1]{} - \def\gray {\graycolor} -\fi - -%D \macros -%D {localstartraster,localstopraster, -%D startraster,stopraster,raster} -%D -%D The previous conversions are not linear and treat each color -%D component according to human perception curves. Pure gray -%D (we call them rasters) has equal color components. In -%D \CONTEXT\ rasters are only used as backgrounds and these -%D don't cross page boundaries in the way color does. Therefore -%D we don't need stacks and marks. Just to be compatible with -%D color support we offer both 'global' and 'local' commands. 
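Editor's note: the \doifcolorelse test (defined in the MkII code earlier in this patch, with the fallback above) lets macros degrade gracefully when a color has not been set up. A minimal sketch; companyblue is a hypothetical color name:

\starttyping
\doifcolorelse {companyblue}
  {\color[companyblue]{tagline}}
  {tagline}% keep the current color when undefined
\stoptyping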
- -\ifx\startraster\undefined - \def\startraster [#1]{} - \def\stopraster {} - \def\raster [#1]{} - \def\localstartraster[#1]{} - \def\localstopraster {} -\fi - -%D \macros -%D {colorvalue, grayvalue} -%D -%D We can typeset the color components using \type{\colorvalue} and -%D \type{\grayvalue}. The commands: -%D -%D \startbuffer -%D color value of SomeKindOfRed: \colorvalue{SomeKindOfRed} \crlf -%D gray value of SomeKindOfRed: \grayvalue{SomeKindOfRed} -%D \stopbuffer -%D -%D \typebuffer -%D -%D show us: -%D -%D \startvoorbeeld -%D \getbuffer -%D \stopvoorbeeld - -\def\colorformatseparator{ } - -\ifx\colorvalue\undefined - \let\colorvalue\gobbleoneargument - \let\grayvalue \gobbleoneargument -\fi - -% check: \currentcolorname -% check: \outercolorname - -%D \macros -%D {setupcolor} -%D -%D Color definitions can be grouped in files with the name: -%D -%D \starttyping -%D \f!colorprefix-identifier.tex -%D \stoptyping -%D -%D where \type{\f!colorprefix} is \unprotect {\tttf \f!colorprefix}. -%D Loading such a file is done by \protect -%D -%D \showsetup{setupcolor} -%D -%D Some default colors are specified in \type{colo-rgb.tex}, -%D which is loaded into the format by: -%D -%D \starttyping -%D \setupcolor[rgb] -%D \stoptyping - -\let\colorstyle\empty - -\def\setupcolor - {\dosingleargument\dosetupcolor} - -\def\dosetupcolor[#1]% - {\doifnot{#1}\colorstyle - {\def\colorstyle{#1}% - \processcommalist[#1]\dodosetupcolor}} - -\def\dodosetupcolor#1% - {\makeshortfilename[\truefilename{\f!colorprefix#1}]% - \startreadingfile - \readsysfile\shortfilename - {\showmessage\m!colors4\colorstyle} - {\showmessage\m!colors5\colorstyle}% - \stopreadingfile} - -\let\usecolors\setupcolor - -% check: \chardef\currentcolorchannel=0 -% check: \startcolormode -% check: \newif\iffilterspotcolor \filterspotcolorfalse -% check: \newif\ifdoingspotcolor \doingspotcolorfalse -% check: \registercolorchannel - -%D \macros -%D {definetransparency} -%D -%D This command numbers to names: - -\def\definetransparency - {\dodoubleargument\dodefinetransparency} - -\def\setupcolors - {\dosingleargument\dosetupcolors} - -\def\resetcolorsplitting - {\chardef\currentcolorchannel\zerocount - \let\currentspotcolor\empty - \filterspotcolorfalse} - -\def\colorsplitsuffix{\ifcase\currentcolorchannel\else-\@@clsplit\fi} -\def\colorsplitprefix{\ifcase\currentcolorchannel\else\@@clsplit-\fi} - -\def\setcolorsplitting - {\resetsystemmode{\v!color\colorsplitsuffix}% - \resetcolorsplitting - \processaction - [\@@clsplit] - [ c=>\chardef\currentcolorchannel1,% - m=>\chardef\currentcolorchannel2,% - y=>\chardef\currentcolorchannel3,% - k=>\chardef\currentcolorchannel4,% - r=>\chardef\currentcolorchannel5,% - g=>\chardef\currentcolorchannel6,% - b=>\chardef\currentcolorchannel7,% - s=>\chardef\currentcolorchannel8,% - \v!no=>,% \currentcolorchannel0,% all colors - \s!default=>,% \currentcolorchannel0,% all colors - \s!unknown=>\filterspotcolortrue - \edef\currentspotcolor{\commalistelement}]% - \setsystemmode{\v!color\colorsplitsuffix}% - \iffilterspotcolor \let\@@clrgb\v!no \fi} - -\ifx\dosetupcolormodel\undefined - \let\dosetupcolormodel\relax -\fi - -\def\dosetupcolors[#1]% some no longer make sense in MkIV - {\getparameters[\??cl][#1]% - \doifelse\@@clspot\v!yes - \SPOTsupportedtrue - \SPOTsupportedfalse - \doifelsenothing\@@clsplit - \resetcolorsplitting - \setcolorsplitting - \doifelse\@@clreduction\v!yes - \reduceCMYKtrue - \reduceCMYKfalse - \doifelse\@@clexpansion\v!yes - \freezecolorstrue - \freezecolorsfalse - 
\doifelse\@@clcriterium\v!all - \hidesplitcolortrue - \hidesplitcolorfalse - \doifelse\@@clrgb\v!no - {\ifRGBsupported \showmessage\m!colors {9}\v!rgb\RGBsupportedfalse\fi} - {\ifRGBsupported\else\showmessage\m!colors{10}\v!rgb\RGBsupportedtrue \fi}% - \doifelse\@@clcmyk\v!no - {\ifCMYKsupported \showmessage\m!colors {9}\v!cmyk\CMYKsupportedfalse\fi} - {\ifCMYKsupported\else\showmessage\m!colors{10}\v!cmyk\CMYKsupportedtrue \fi}% - \doifelse\@@clmpcmyk\v!no - {\ifMPcmykcolors \showmessage\m!colors {9}{\v!mp\v!cmyk}\MPcmykcolorsfalse\fi} - {\ifMPcmykcolors\else\showmessage\m!colors{10}{\v!mp\v!cmyk}\MPcmykcolorstrue \fi}% - \doifelse\@@clmpspot\v!no - {\ifMPspotcolors \showmessage\m!colors {9}{\v!mp\v!spot}\MPspotcolorsfalse\fi} - {\ifMPspotcolors\else\showmessage\m!colors{10}{\v!mp\v!spot}\MPspotcolorstrue \fi}% - \preferGRAYfalse - \processaction - [\@@clconversion] - [ \v!yes=>\preferGRAYtrue, - \v!always=>\preferGRAYtrue\RGBsupportedfalse\CMYKsupportedfalse]% - \ifRGBsupported - \converttoGRAYfalse - \forcegrayMPcolorsfalse - \else\ifCMYKsupported - \converttoGRAYfalse - \forcegrayMPcolorsfalse - \convertMPcolorstrue - \ifreduceCMYK - \reduceMPcolorstrue - \fi - \else - \ifconverttoGRAY\else\showmessage\m!colors{11}\empty\fi - \converttoGRAYtrue - \forcegrayMPcolorstrue - \convertMPcolorsfalse - \reduceMPcolorsfalse - \fi\fi - \processaction - [\@@clstate] - [ \v!global=>\ifincolor\else\showmessage\m!colors1\colorstyle\fi - \incolortrue\localcolorfalse, - \v!local=>\ifincolor\else\showmessage\m!colors2\colorstyle\fi - \incolortrue\localcolortrue, - \v!start=>\ifincolor\else\showmessage\m!colors1\colorstyle\fi - \incolortrue\localcolorfalse - \let\@@clstate\v!global, - \v!stop=>\incolorfalse\localcolorfalse - \forcegrayMPcolorstrue]% - \dosetupcolormodel - \initializemaintextcolor} - -%D \macros -%D {startregistercolor,stopregistercolor,permitcolormode} -%D -%D If you only want to register a color, the switch \type -%D {\ifpermitcolormode} can be used. That way the nested -%D colors know where to go back to. - -\ifx\startregistercolor\undefined - \def\startregistercolor[#1]{} - \def\stopregistercolor {} -\fi - -%D We use these macros for implementing text colors -%D (actually, the first application was in foreground -%D colors). -%D -%D \starttyping -%D \starttextcolor[red] -%D \dorecurse{10}{\input tufte \color[green]{oeps} \par} -%D \stoptextcolor -%D \stoptyping -%D -%D This is more efficient than the alternative: -%D -%D \starttyping -%D \setupbackgrounds[text][foregroundcolor=red] -%D \startregistercolor[red] -%D \dorecurse{10}{\input tufte \color[green]{oeps} \par} -%D \stopregistercolor -%D \stoptyping - -\def\maintextcolor {} -\def\defaulttextcolor {black} -\def\@@themaintextcolor{themaintextcolor} - -\ifx\initializemaintextcolor\undefined - \def\starttextcolor [#1]{} - \def\stoptextcolor {} - \def\initializemaintextcolor {} -\fi - -\ifx\restoretextcolor\undefined % to be redone - \let\restoretextcolor \firstofoneargument - \let\localstarttextcolor\relax - \let\localstoptextcolor \relax -\fi - -%D In this documentation we will not go into too much details -%D on palets. Curious users can find more information on this -%D topic in \from[use of color]. -%D -%D At the moment we implemented color in \CONTEXT\ color -%D printing was not yet on the desktop. In spite of this lack our -%D graphics designer made colorfull illustrations. When printed -%D on a black and white printer, distinctive colors can come -%D out equally gray. 
We therefore decided to use only colors -%D that were distinctive in color as well as in black and -%D white print. -%D -%D Although none of the graphic packages we used supported -%D logical colors and global color redefinition, we built this -%D support into \CONTEXT. This enabled us to experiment and -%D also prepared us for the future. - -%D \macros -%D {definepalet} -%D -%D Colors are grouped in palets. The colors in such a palet can -%D have colorful names, but best is to use names that specify -%D their use, like {\em important} or {\em danger}. As a sort -%D of example, \CONTEXT\ has some palets predefined, -%D like:\footnote{At the time I wrote the palet support, I was -%D reading 'A short history of time' by S.~Hawking, so that's -%D why we stuck to quarks.} -%D -%D \starttyping -%D \definepalet -%D [alfa] -%D [ top=rood:7, -%D bottom=groen:6, -%D up=blauw:5, -%D down=cyaan:4, -%D strange=magenta:3, -%D charm=geel:2] -%D \stoptyping -%D -%D Its formal definition is: -%D -%D \showsetup{definepalet} -%D -%D Visualized, such a palet looks like: -%D -%D \startbuffer[palet] -%D \showpalet [alfa] [horizontal,name,number,value] -%D \stopbuffer -%D -%D \startlinecorrection -%D \getbuffer[palet] -%D \stoplinecorrection -%D -%D This bar shows both the color and gray alternatives of the -%D palet components (not visible in black and white print). -%D -%D When needed, one can copy a palet by saying: -%D -%D \starttyping -%D \definepalet [TEXcolorpretty] [colorpretty] -%D \stoptyping -%D -%D This saves us some typing in for instance the modules that -%D deal with pretty verbatim typesetting. - -\def\definepalet - {\dodoubleargument\dodefinepalet} - -\def\dodefinepalet[#1][#2]% - {\doifassignmentelse{#2} - {%\showmessage\m!colors6{#1}% - \letvalue{\??pa#1}\empty - \setevalue{\??pa\??pa#1}{#2}% - \def\dodododefinepalet[##1=##2]% - {\doifvaluesomething{\??pa#1} - {\setevalue{\??pa#1}{\csname\??pa#1\endcsname,}}% - \setevalue{\??pa#1}{\csname\??pa#1\endcsname##1}% - \dodefinepaletcolor{#1}{##1}{##2}}% - \def\dododefinepalet##1% - {\dodododefinepalet[##1]}% - \processcommalist[#2]\dododefinepalet} - {\doifdefined{\??pa#2} - {\expanded{\dodefinepalet[#1][\csname\??pa\??pa#2\endcsname]}}}} - -\ifx\dodefinepaletcolor\undefined - \let\dodefinepaletcolor\gobblethreearguments -\fi - -\let\paletsize\!!zerocount - -\def\getpaletsize[#1]% - {\getcommacommandsize[\csname\??pa\??pa#1\endcsname]% - \edef\paletsize{\number\commalistsize}} - -%D Instead of referring to colors, one can also directly specify -%D a color: -%D -%D \starttyping -%D \definepalet[test][xx=green] -%D \definepalet[test][xx={y=.4}] -%D \stoptyping - -%D \macros -%D {setuppalet} -%D -%D Colors are taken from the current palet, if defined.
-%D Setting the current palet is done by: -%D -%D \showsetup{setuppalet} - -\let\currentpalet\empty - -\def\setuppalet - {\dosingleempty\dosetuppalet} - -\def\dosetuppalet[#1]% - {\edef\currentpalet{#1}% - \ifx\currentpalet\empty - % seems to be a reset - \else\ifcsname\??pa\currentpalet\endcsname - \edef\currentpalet{#1:}% - \else - \showmessage\m!colors7\currentpalet - \let\currentpalet\empty - \fi\fi} - -%D \macros -%D {showpalet} -%D -%D The previous visualization was typeset with: -%D -%D \typebuffer[palet] -%D -%D This command is defined as: -%D -%D \showsetup{showpalet} - -\fetchruntimecommand \showpalet {\f!colorprefix\s!run} - -%D \macros -%D {showcolorcomponents} -%D -%D \starttyping -%D \showcolorcomponents[color-1,color-2] -%D \stoptyping - -\fetchruntimecommand \showcolorcomponents {\f!colorprefix\s!run} - -%D \macros -%D {definecolorgroup} -%D -%D The naming of the colors in this palet suggests some -%D ordering, which in turn is supported by color grouping. -%D -%D \starttyping -%D \definecolorgroup -%D [red] -%D [1.00:0.90:0.90, -%D 1.00:0.80:0.80, -%D 1.00:0.70:0.70, -%D 1.00:0.55:0.55, -%D 1.00:0.40:0.40, -%D 1.00:0.25:0.25, -%D 1.00:0.15:0.15, -%D 0.90:0.00:0.00] -%D \stoptyping -%D -%D In such a color group colors are numbered from~$1$ to~$n$. -%D -%D \showsetup{definecolorgroup} -%D -%D This kind of specification is not only more compact than -%D defining each color separately, it also loads faster and takes -%D fewer bytes. - -\def\definecolorgroup - {\dotripleempty\dodefinecolorgroup} - -\def\dododefinecolorgroupgray [#1][#2:#3]{\definecolor [#1:\the\colorcount][s=#2]} -\def\dododefinecolorgrouprgb [#1][#2:#3:#4:#5]{\definecolor [#1:\the\colorcount][r=#2,g=#3,b=#4]} -\def\dododefinecolorgroupcmyk[#1][#2:#3:#4:#5:#6]{\definecolor [#1:\the\colorcount][c=#2,m=#3,y=#4,k=#5]} -\def\dododefinecolorgroupspot [#1][#2:#3:#4]{\definespotcolor[#1:\the\colorcount][#2][p=#3]} - -\def\dododefinecolorgroup#1#2% - {\advance\colorcount\plusone - \getvalue{dododefinecolorgroup\currentcolorspace}[#1][#2:0:0:0:0]} - -\def\dodefinecolorgroup[#1][#2][#3]% obsolete, just use palets - {\ifthirdargument - \doifelsenothing{#2}{\let\currentcolorspace\v!rgb}{\def\currentcolorspace{#2}}% - \colorcount\zerocount - \processcommalist[#3]{\dododefinecolorgroup{#1}}% - \else - \doifinstringelse{:}{#2} - {\definecolorgroup[#1][\v!rgb][#2]} - {\doloop - {\doifdefinedelse{\??cr#2:\recurselevel} - {\setevalue{\??cr#1:\recurselevel}{\csname\??cr#2:\recurselevel\endcsname}} - {\exitloop}}}% - \fi} - -%D \macros -%D {showcolorgroup} -%D -%D We can show the group by: -%D -%D \startbuffer -%D \showcolorgroup [blue] [horizontal,name,number,value] -%D \stopbuffer -%D -%D \typebuffer -%D -%D or in color: -%D -%D \startlinecorrection -%D \getbuffer -%D \stoplinecorrection -%D -%D which uses: -%D -%D \showsetup{showcolorgroup} - -\fetchruntimecommand \showcolorgroup {\f!colorprefix\s!run} - -%D There are ten predefined color groups, like -%D \color[green]{\em groen}, \color[red]{\em rood}, -%D \color[blue]{\em blauw}, \color[cyan]{\em cyaan}, -%D \color[magenta]{\em magenta} and \color[yellow]{\em geel}.
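Group members are ordinary colors named after the group plus their number, and palets can map logical names onto such members. The fragment below is an illustrative sketch (the group and palet names are invented); the predefined groups themselves are shown next:

    \definecolorgroup
      [corporate] [rgb]
      [0.90:0.90:1.00,
       0.70:0.70:1.00,
       0.40:0.40:0.90]

    \color[corporate:2]{picked by number}

    \definepalet [housestyle] [important=corporate:3,minor=corporate:1]
    \setuppalet  [housestyle]
    \color[important]{picked by its logical name}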
- -%D -%D \startlinecorrection -%D \hbox to \hsize -%D {\hss -%D \showcolorgroup [red] [vertical,name,number]\hss -%D \showcolorgroup [green] [vertical,name]\hss -%D \showcolorgroup [blue] [vertical,name]\hss -%D \showcolorgroup [cyan] [vertical,name]\hss -%D \showcolorgroup [magenta][vertical,name]\hss -%D \showcolorgroup [yellow] [vertical,name]\hss} -%D \stoplinecorrection -%D -%D These groups are used to define palets {\em alfa} up to {\em -%D zeta}. As long as we don't use colors from the same row, we -%D get ourselves distinctive palets. By activating such a palet -%D one gains access to its members {\em top} to {\em charm} (of -%D course one should use more suitable names than these). -%D -%D \startlinecorrection -%D \hbox to \hsize -%D {\showpalet [alfa] [vertical,name,number]\hss -%D \showpalet [beta] [vertical,name]\hss -%D \showpalet [gamma] [vertical,name]\hss -%D \showpalet [delta] [vertical,name]\hss -%D \showpalet [epsilon] [vertical,name]\hss -%D \showpalet [zeta] [vertical,name]} -%D \stoplinecorrection -%D -%D By using the keyword \type {value} the individual color -%D components are shown too. When printed in color, these -%D showcases show both the colors and the gray value. - -%D \macros -%D {comparepalet} -%D -%D There are some more testing macros available: -%D -%D \startbuffer -%D \comparepalet [alfa] -%D \stopbuffer -%D -%D \typebuffer -%D -%D shows the palet colors against a background: -%D -%D \startlinecorrection -%D \getbuffer -%D \stoplinecorrection -%D -%D The formal definition is: -%D -%D \showsetup{comparepalet} - -\fetchruntimecommand \comparepalet {\f!colorprefix\s!run} - -%D \macros -%D {comparecolorgroup} -%D -%D The similar command: -%D -%D \startbuffer -%D \comparecolorgroup [blue] -%D \stopbuffer -%D -%D \typebuffer -%D -%D shows color groups: -%D -%D \startlinecorrection -%D \getbuffer -%D \stoplinecorrection -%D -%D this command is defined as: -%D -%D \showsetup{comparecolorgroup} - -\fetchruntimecommand \comparecolorgroup {\f!colorprefix\s!run} - -%D \macros -%D {showcolor} -%D -%D But let's not forget that we also have the more traditional -%D non||related colors. These show up after: -%D -%D \starttyping -%D \showcolor [name] -%D \stoptyping -%D -%D Where \type{name} for instance can be \type{rgb}. -%D -%D \showsetup{showcolor} - -\fetchruntimecommand \showcolor {\f!colorprefix\s!run} - -%D It would make sense to put the following code in \type -%D {colo-mps}, but it is rather low level. - -%D \macros -%D {negatecolorcomponent,negatedcolorcomponent} -%D -%D These speak for themselves. See \type {colo-ext} for usage. - -\def\negatecolorcomponent#1% #1 = \macro - {\scratchdimen\onepoint\advance\scratchdimen-#1\onepoint - \ifdim\scratchdimen<\zeropoint\scratchdimen\zeropoint\fi - \edef#1{\withoutpt\the\scratchdimen}} - -\let\negatedcolorcomponent\firstofoneargument - -\def\negatedcolorcomponent#1% - {\ifdim\dimexpr\onepoint-#1\onepoint\relax<\zeropoint - \!!zerocount - \else - \expandafter\withoutpt\the\dimexpr\onepoint-#1\onepoint\relax - \fi} - -\def\negatecolorcomponent#1% #1 = \macro - {\edef#1{\negatedcolorcomponent{#1}}} - -%D \macros -%D {ifMPgraphics, ifMPcmykcolors, MPcolor} -%D -%D A very special macro is \type{\MPcolor}. This one can be -%D used to pass a \CONTEXT\ color to \METAPOST. -%D -%D \starttyping -%D \MPcolor{my own red} -%D \stoptyping -%D -%D This macro returns a \METAPOST\ triplet \type{(R,G,B)}. -%D Unless \CMYK\ color support is turned on with \type -%D {MPcmyk}, only \cap{RGB} colors and gray scales are -%D supported.
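In practice \MPcolor is used inside a MetaPost graphic to color paths with a color that was defined on the TeX side. A small sketch, assuming the usual useMPgraphic wrappers (the color and graphic names are only examples, not part of the patch):

    \definecolor[myred][r=.8,g=.1,b=.1]

    \startuseMPgraphic{demo}
      fill fullcircle scaled 2cm withcolor \MPcolor{myred} ;
    \stopuseMPgraphic

    \useMPgraphic{demo}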
- -\newif\ifMPcmykcolors % \MPcmykcolorsfalse -\newif\ifMPspotcolors % \MPspotcolorsfalse - -\ifx\MPcolor\undefined - \def\MPcolor#1{(0,0,0)} -\fi - -%D \macros -%D {PDFcolor,FDFcolor} -%D -%D Similar alternatives are avaliable for \PDF: - -%D For the moment we keep the next downward compatibility -%D switch, i.e.\ expanded colors. However, predefined colors -%D and palets are no longer expanded (which is what I wanted -%D in the first place). -%D -%D Well, in case we want to do color separation and use CMYK -%D colors only, this is dangerous since unwanted remapping may -%D take place. Especially when we redefine already defined -%D colors in another color space (e.g. darkgreen is -%D predefined in RGB color space, so a redefinition in CMYK -%D coordinates before RGB mode is disabled, would give -%D unexpected results due to the already frozen color spec.) -%D -%D So, from now on, colors are not frozen any more! - -% \appendtoks\setupcolors[\c!expansie=\v!ja]\to\everyjob - -\chardef\currentcolorchannel=0 - -\newif\iffilterspotcolor \filterspotcolorfalse -\newif\ifdoingspotcolor \doingspotcolorfalse - -\def\registercolorchannel#1% - {\ifdoingspotcolor \else - \global\expandafter\chardef\csname\??cs#1\endcsname\zerocount - \fi} - -\newif\ifhidesplitcolor \hidesplitcolortrue - -%D The next macro is for instance used in figure splitting: - -\def\doifseparatingcolorselse - {\iffilterspotcolor - \@EA\firstoftwoarguments - \else\ifcase\currentcolorchannel - \@EAEAEA\secondoftwoarguments - \else - \@EAEAEA\firstoftwoarguments - \fi\fi} - -\def\doifcolorchannelelse#1% - {\doifseparatingcolorselse - {\doifelsenothing{#1} - \secondoftwoarguments - {\doifelse{#1}\@@clsplit - \firstoftwoarguments - \secondoftwoarguments}} - \secondoftwoarguments} - -\def\resetcolorseparation - {\filterspotcolorfalse - \chardef\currentcolorchannel\zerocount} - -%D These can be used in selecting specific files (like -%D figuredatabases). - -% we already have: -% -% \def\colorsplitsuffix{\ifcase\currentcolorchannel\else-\@@clsplitsen\fi} -% \def\colorsplitprefix{\ifcase\currentcolorchannel\else\@@clsplitsen-\fi} - -\def\colorchannelprefix{\doifseparatingcolorselse\@@clsplit\empty-} -\def\colorchannelsuffix{-\doifseparatingcolorselse\@@clsplit\empty} - -%D We now load the low level macros: - -\loadmarkfile{colo-new} - -%D We default to the colors defined in \module{colo-rgb} and -%D support both \cap{RGB} and \cap{CMYK} output. As you can -%D see, color support is turned off by default. Reduction of -%D gray colors to gray scales is turned on. 
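Because the defaults that follow leave color support switched off (state=stop), a document has to enable it before any of these definitions become visible in the output. A minimal sketch, assuming the English interface keys:

    \setupcolors[state=start]

    \starttext
      black text, \color[red]{red text}, black text again
    \stoptext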
- -\definecolor[black][s=0] -\definecolor[white][s=1] - -\definetransparency [none] [0] -\definetransparency [normal] [1] -\definetransparency [multiply] [2] -\definetransparency [screen] [3] -\definetransparency [overlay] [4] -\definetransparency [softlight] [5] -\definetransparency [hardlight] [6] -\definetransparency [colordodge] [7] -\definetransparency [colorburn] [8] -\definetransparency [darken] [9] -\definetransparency [lighten] [10] -\definetransparency [difference] [11] -\definetransparency [exclusion] [12] - -\setupcolors - [\c!state=\v!stop, - \c!conversion=\v!yes, - \c!reduction=\v!no, - \c!rgb=\v!yes, - \c!cmyk=\v!yes, - \c!spot=\v!yes, - \c!mp\c!cmyk=\@@clcmyk, - \c!mp\c!spot=\@@clspot, - \c!expansion=\v!no, - \c!textcolor=, - \c!split=\v!no, - \c!criterium=\v!all] - -\setupcolor - [\v!rgb] - -\protect \endinput diff --git a/Master/texmf-dist/tex/context/base/colo-x11.tex b/Master/texmf-dist/tex/context/base/colo-x11.tex new file mode 100644 index 00000000000..45d3aac6248 --- /dev/null +++ b/Master/texmf-dist/tex/context/base/colo-x11.tex @@ -0,0 +1,677 @@ +%D \module +%D [ file=colo-x11, +%D version=2009.11.13, +%D title=\CONTEXT\ Color Macros, +%D subtitle=X11, +%D author=Alan Braslau] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +%D Standard X11 rgb colors (from \type {/usr/share/X11/rgb.txt}): + +\doifnotmode{mkiv} { + \input colo-hex.mkii +} + +\definecolor [snow] [h=fffafa] +\definecolor [ghostwhite] [h=f8f8ff] +\definecolor [whitesmoke] [s=0.96] +\definecolor [gainsboro] [s=0.86] +\definecolor [floralwhite] [h=fffaf0] +\definecolor [oldlace] [h=fdf5e6] +\definecolor [linen] [h=faf0e6] +\definecolor [antiquewhite] [h=faebd7] +\definecolor [papayawhip] [h=ffefd5] +\definecolor [blanchedalmond] [h=ffebcd] +\definecolor [bisque] [h=ffe4c4] +\definecolor [peachpuff] [h=ffdab9] +\definecolor [navajowhite] [h=ffdead] +\definecolor [moccasin] [h=ffe4b5] +\definecolor [cornsilk] [h=fff8dc] +\definecolor [ivory] [h=fffff0] +\definecolor [lemonchiffon] [h=fffacd] +\definecolor [seashell] [h=fff5ee] +\definecolor [honeydew] [h=f0fff0] +\definecolor [mintcream] [h=f5fffa] +\definecolor [azure] [h=f0ffff] +\definecolor [aliceblue] [h=f0f8ff] +\definecolor [lavender] [h=e6e6fa] +\definecolor [lavenderblush] [h=fff0f5] +\definecolor [mistyrose] [h=ffe4e1] +\definecolor [white] [s=1] +\definecolor [black] [s=0] +\definecolor [darkslategray] [h=2f4f4f] +\definecolor [darkslategrey] [darkslategray] +\definecolor [dimgray] [s=0.41] +\definecolor [dimgrey] [dimgray] +\definecolor [slategray] [h=708090] +\definecolor [slategrey] [slategray] +\definecolor [lightslategray] [h=778899] +\definecolor [lightslategrey] [lightslategray] +\definecolor [gray] [s=0.75] +\definecolor [grey] [gray] +\definecolor [lightgrey] [s=0.83] +\definecolor [lightgray] [lightgrey] +\definecolor [midnightblue] [h=191970] +\definecolor [navy] [h=000080] +\definecolor [navyblue] [navy] +\definecolor [cornflowerblue] [h=6495ed] +\definecolor [darkslateblue] [h=483d8b] +\definecolor [slateblue] [h=6a5acd] +\definecolor [mediumslateblue] [h=7b68ee] +\definecolor [lightslateblue] [h=8470ff] +\definecolor [mediumblue] [h=0000cd] +\definecolor [royalblue] [h=4169e1] +\definecolor [blue] [h=0000ff] +\definecolor [dodgerblue] [h=1e90ff] +\definecolor [deepskyblue] [h=00bfff] +\definecolor [skyblue] [h=87ceeb] +\definecolor [lightskyblue] [h=87cefa] +\definecolor [steelblue] [h=4682b4] +\definecolor [lightsteelblue] [h=b0c4de] 
+\definecolor [lightblue] [h=add8e6] +\definecolor [powderblue] [h=b0e0e6] +\definecolor [paleturquoise] [h=afeeee] +\definecolor [darkturquoise] [h=00ced1] +\definecolor [mediumturquoise] [h=48d1cc] +\definecolor [turquoise] [h=40e0d0] +\definecolor [cyan] [h=00ffff] +\definecolor [lightcyan] [h=e0ffff] +\definecolor [cadetblue] [h=5f9ea0] +\definecolor [mediumaquamarine] [h=66cdaa] +\definecolor [aquamarine] [h=7fffd4] +\definecolor [darkgreen] [h=006400] +\definecolor [darkolivegreen] [h=556b2f] +\definecolor [darkseagreen] [h=8fbc8f] +\definecolor [seagreen] [h=2e8b57] +\definecolor [mediumseagreen] [h=3cb371] +\definecolor [lightseagreen] [h=20b2aa] +\definecolor [palegreen] [h=98fb98] +\definecolor [springgreen] [h=00ff7f] +\definecolor [lawngreen] [h=7cfc00] +\definecolor [green] [h=00ff00] +\definecolor [chartreuse] [h=7fff00] +\definecolor [mediumspringgreen] [h=00fa9a] +\definecolor [greenyellow] [h=adff2f] +\definecolor [limegreen] [h=32cd32] +\definecolor [yellowgreen] [h=9acd32] +\definecolor [forestgreen] [h=228b22] +\definecolor [olivedrab] [h=6b8e23] +\definecolor [darkkhaki] [h=bdb76b] +\definecolor [khaki] [h=f0e68c] +\definecolor [palegoldenrod] [h=eee8aa] +\definecolor [lightgoldenrodyellow] [h=fafad2] +\definecolor [lightyellow] [h=ffffe0] +\definecolor [yellow] [h=ffff00] +\definecolor [gold] [h=ffd700] +\definecolor [lightgoldenrod] [h=eedd82] +\definecolor [goldenrod] [h=daa520] +\definecolor [darkgoldenrod] [h=b8860b] +\definecolor [rosybrown] [h=bc8f8f] +\definecolor [indianred] [h=cd5c5c] +\definecolor [saddlebrown] [h=8b4513] +\definecolor [sienna] [h=a0522d] +\definecolor [peru] [h=cd853f] +\definecolor [burlywood] [h=deb887] +\definecolor [beige] [h=f5f5dc] +\definecolor [wheat] [h=f5deb3] +\definecolor [sandybrown] [h=f4a460] +\definecolor [tan] [h=d2b48c] +\definecolor [chocolate] [h=d2691e] +\definecolor [firebrick] [h=b22222] +\definecolor [brown] [h=a52a2a] +\definecolor [darksalmon] [h=e9967a] +\definecolor [salmon] [h=fa8072] +\definecolor [lightsalmon] [h=ffa07a] +\definecolor [orange] [h=ffa500] +\definecolor [darkorange] [h=ff8c00] +\definecolor [coral] [h=ff7f50] +\definecolor [lightcoral] [h=f08080] +\definecolor [tomato] [h=ff6347] +\definecolor [orangered] [h=ff4500] +\definecolor [red] [h=ff0000] +\definecolor [hotpink] [h=ff69b4] +\definecolor [deeppink] [h=ff1493] +\definecolor [pink] [h=ffc0cb] +\definecolor [lightpink] [h=ffb6c1] +\definecolor [palevioletred] [h=db7093] +\definecolor [maroon] [h=b03060] +\definecolor [mediumvioletred] [h=c71585] +\definecolor [violetred] [h=d02090] +\definecolor [magenta] [h=ff00ff] +\definecolor [violet] [h=ee82ee] +\definecolor [plum] [h=dda0dd] +\definecolor [orchid] [h=da70d6] +\definecolor [mediumorchid] [h=ba55d3] +\definecolor [darkorchid] [h=9932cc] +\definecolor [darkviolet] [h=9400d3] +\definecolor [blueviolet] [h=8a2be2] +\definecolor [purple] [h=a020f0] +\definecolor [mediumpurple] [h=9370db] +\definecolor [thistle] [h=d8bfd8] +\definecolor [snow1] [h=fffafa] +\definecolor [snow2] [h=eee9e9] +\definecolor [snow3] [h=cdc9c9] +\definecolor [snow4] [h=8b8989] +\definecolor [seashell1] [h=fff5ee] +\definecolor [seashell2] [h=eee5de] +\definecolor [seashell3] [h=cdc5bf] +\definecolor [seashell4] [h=8b8682] +\definecolor [antiquewhite1] [h=ffefdb] +\definecolor [antiquewhite2] [h=eedfcc] +\definecolor [antiquewhite3] [h=cdc0b0] +\definecolor [antiquewhite4] [h=8b8378] +\definecolor [bisque1] [h=ffe4c4] +\definecolor [bisque2] [h=eed5b7] +\definecolor [bisque3] [h=cdb79e] +\definecolor [bisque4] 
[h=8b7d6b] +\definecolor [peachpuff1] [h=ffdab9] +\definecolor [peachpuff2] [h=eecbad] +\definecolor [peachpuff3] [h=cdaf95] +\definecolor [peachpuff4] [h=8b7765] +\definecolor [navajowhite1] [h=ffdead] +\definecolor [navajowhite2] [h=eecfa1] +\definecolor [navajowhite3] [h=cdb38b] +\definecolor [navajowhite4] [h=8b795e] +\definecolor [lemonchiffon1] [h=fffacd] +\definecolor [lemonchiffon2] [h=eee9bf] +\definecolor [lemonchiffon3] [h=cdc9a5] +\definecolor [lemonchiffon4] [h=8b8970] +\definecolor [cornsilk1] [h=fff8dc] +\definecolor [cornsilk2] [h=eee8cd] +\definecolor [cornsilk3] [h=cdc8b1] +\definecolor [cornsilk4] [h=8b8878] +\definecolor [ivory1] [h=fffff0] +\definecolor [ivory2] [h=eeeee0] +\definecolor [ivory3] [h=cdcdc1] +\definecolor [ivory4] [h=8b8b83] +\definecolor [honeydew1] [h=f0fff0] +\definecolor [honeydew2] [h=e0eee0] +\definecolor [honeydew3] [h=c1cdc1] +\definecolor [honeydew4] [h=838b83] +\definecolor [lavenderblush1] [h=fff0f5] +\definecolor [lavenderblush2] [h=eee0e5] +\definecolor [lavenderblush3] [h=cdc1c5] +\definecolor [lavenderblush4] [h=8b8386] +\definecolor [mistyrose1] [h=ffe4e1] +\definecolor [mistyrose2] [h=eed5d2] +\definecolor [mistyrose3] [h=cdb7b5] +\definecolor [mistyrose4] [h=8b7d7b] +\definecolor [azure1] [h=f0ffff] +\definecolor [azure2] [h=e0eeee] +\definecolor [azure3] [h=c1cdcd] +\definecolor [azure4] [h=838b8b] +\definecolor [slateblue1] [h=836fff] +\definecolor [slateblue2] [h=7a67ee] +\definecolor [slateblue3] [h=6959cd] +\definecolor [slateblue4] [h=473c8b] +\definecolor [royalblue1] [h=4876ff] +\definecolor [royalblue2] [h=436eee] +\definecolor [royalblue3] [h=3a5fcd] +\definecolor [royalblue4] [h=27408b] +\definecolor [blue1] [h=0000ff] +\definecolor [blue2] [h=0000ee] +\definecolor [blue3] [h=0000cd] +\definecolor [blue4] [h=00008b] +\definecolor [dodgerblue1] [h=1e90ff] +\definecolor [dodgerblue2] [h=1c86ee] +\definecolor [dodgerblue3] [h=1874cd] +\definecolor [dodgerblue4] [h=104e8b] +\definecolor [steelblue1] [h=63b8ff] +\definecolor [steelblue2] [h=5cacee] +\definecolor [steelblue3] [h=4f94cd] +\definecolor [steelblue4] [h=36648b] +\definecolor [deepskyblue1] [h=00bfff] +\definecolor [deepskyblue2] [h=00b2ee] +\definecolor [deepskyblue3] [h=009acd] +\definecolor [deepskyblue4] [h=00688b] +\definecolor [skyblue1] [h=87ceff] +\definecolor [skyblue2] [h=7ec0ee] +\definecolor [skyblue3] [h=6ca6cd] +\definecolor [skyblue4] [h=4a708b] +\definecolor [lightskyblue1] [h=b0e2ff] +\definecolor [lightskyblue2] [h=a4d3ee] +\definecolor [lightskyblue3] [h=8db6cd] +\definecolor [lightskyblue4] [h=607b8b] +\definecolor [slategray1] [h=c6e2ff] +\definecolor [slategray2] [h=b9d3ee] +\definecolor [slategray3] [h=9fb6cd] +\definecolor [slategray4] [h=6c7b8b] +\definecolor [lightsteelblue1] [h=cae1ff] +\definecolor [lightsteelblue2] [h=bcd2ee] +\definecolor [lightsteelblue3] [h=a2b5cd] +\definecolor [lightsteelblue4] [h=6e7b8b] +\definecolor [lightblue1] [h=bfefff] +\definecolor [lightblue2] [h=b2dfee] +\definecolor [lightblue3] [h=9ac0cd] +\definecolor [lightblue4] [h=68838b] +\definecolor [lightcyan1] [h=e0ffff] +\definecolor [lightcyan2] [h=d1eeee] +\definecolor [lightcyan3] [h=b4cdcd] +\definecolor [lightcyan4] [h=7a8b8b] +\definecolor [paleturquoise1] [h=bbffff] +\definecolor [paleturquoise2] [h=aeeeee] +\definecolor [paleturquoise3] [h=96cdcd] +\definecolor [paleturquoise4] [h=668b8b] +\definecolor [cadetblue1] [h=98f5ff] +\definecolor [cadetblue2] [h=8ee5ee] +\definecolor [cadetblue3] [h=7ac5cd] +\definecolor [cadetblue4] [h=53868b] +\definecolor 
[turquoise1] [h=00f5ff] +\definecolor [turquoise2] [h=00e5ee] +\definecolor [turquoise3] [h=00c5cd] +\definecolor [turquoise4] [h=00868b] +\definecolor [cyan1] [h=00ffff] +\definecolor [cyan2] [h=00eeee] +\definecolor [cyan3] [h=00cdcd] +\definecolor [cyan4] [h=008b8b] +\definecolor [darkslategray1] [h=97ffff] +\definecolor [darkslategray2] [h=8deeee] +\definecolor [darkslategray3] [h=79cdcd] +\definecolor [darkslategray4] [h=528b8b] +\definecolor [aquamarine1] [h=7fffd4] +\definecolor [aquamarine2] [h=76eec6] +\definecolor [aquamarine3] [h=66cdaa] +\definecolor [aquamarine4] [h=458b74] +\definecolor [darkseagreen1] [h=c1ffc1] +\definecolor [darkseagreen2] [h=b4eeb4] +\definecolor [darkseagreen3] [h=9bcd9b] +\definecolor [darkseagreen4] [h=698b69] +\definecolor [seagreen1] [h=54ff9f] +\definecolor [seagreen2] [h=4eee94] +\definecolor [seagreen3] [h=43cd80] +\definecolor [seagreen4] [h=2e8b57] +\definecolor [palegreen1] [h=9aff9a] +\definecolor [palegreen2] [h=90ee90] +\definecolor [palegreen3] [h=7ccd7c] +\definecolor [palegreen4] [h=548b54] +\definecolor [springgreen1] [h=00ff7f] +\definecolor [springgreen2] [h=00ee76] +\definecolor [springgreen3] [h=00cd66] +\definecolor [springgreen4] [h=008b45] +\definecolor [green1] [h=00ff00] +\definecolor [green2] [h=00ee00] +\definecolor [green3] [h=00cd00] +\definecolor [green4] [h=008b00] +\definecolor [chartreuse1] [h=7fff00] +\definecolor [chartreuse2] [h=76ee00] +\definecolor [chartreuse3] [h=66cd00] +\definecolor [chartreuse4] [h=458b00] +\definecolor [olivedrab1] [h=c0ff3e] +\definecolor [olivedrab2] [h=b3ee3a] +\definecolor [olivedrab3] [h=9acd32] +\definecolor [olivedrab4] [h=698b22] +\definecolor [darkolivegreen1] [h=caff70] +\definecolor [darkolivegreen2] [h=bcee68] +\definecolor [darkolivegreen3] [h=a2cd5a] +\definecolor [darkolivegreen4] [h=6e8b3d] +\definecolor [khaki1] [h=fff68f] +\definecolor [khaki2] [h=eee685] +\definecolor [khaki3] [h=cdc673] +\definecolor [khaki4] [h=8b864e] +\definecolor [lightgoldenrod1] [h=ffec8b] +\definecolor [lightgoldenrod2] [h=eedc82] +\definecolor [lightgoldenrod3] [h=cdbe70] +\definecolor [lightgoldenrod4] [h=8b814c] +\definecolor [lightyellow1] [h=ffffe0] +\definecolor [lightyellow2] [h=eeeed1] +\definecolor [lightyellow3] [h=cdcdb4] +\definecolor [lightyellow4] [h=8b8b7a] +\definecolor [yellow1] [h=ffff00] +\definecolor [yellow2] [h=eeee00] +\definecolor [yellow3] [h=cdcd00] +\definecolor [yellow4] [h=8b8b00] +\definecolor [gold1] [h=ffd700] +\definecolor [gold2] [h=eec900] +\definecolor [gold3] [h=cdad00] +\definecolor [gold4] [h=8b7500] +\definecolor [goldenrod1] [h=ffc125] +\definecolor [goldenrod2] [h=eeb422] +\definecolor [goldenrod3] [h=cd9b1d] +\definecolor [goldenrod4] [h=8b6914] +\definecolor [darkgoldenrod1] [h=ffb90f] +\definecolor [darkgoldenrod2] [h=eead0e] +\definecolor [darkgoldenrod3] [h=cd950c] +\definecolor [darkgoldenrod4] [h=8b6508] +\definecolor [rosybrown1] [h=ffc1c1] +\definecolor [rosybrown2] [h=eeb4b4] +\definecolor [rosybrown3] [h=cd9b9b] +\definecolor [rosybrown4] [h=8b6969] +\definecolor [indianred1] [h=ff6a6a] +\definecolor [indianred2] [h=ee6363] +\definecolor [indianred3] [h=cd5555] +\definecolor [indianred4] [h=8b3a3a] +\definecolor [sienna1] [h=ff8247] +\definecolor [sienna2] [h=ee7942] +\definecolor [sienna3] [h=cd6839] +\definecolor [sienna4] [h=8b4726] +\definecolor [burlywood1] [h=ffd39b] +\definecolor [burlywood2] [h=eec591] +\definecolor [burlywood3] [h=cdaa7d] +\definecolor [burlywood4] [h=8b7355] +\definecolor [wheat1] [h=ffe7ba] +\definecolor [wheat2] 
[h=eed8ae] +\definecolor [wheat3] [h=cdba96] +\definecolor [wheat4] [h=8b7e66] +\definecolor [tan1] [h=ffa54f] +\definecolor [tan2] [h=ee9a49] +\definecolor [tan3] [h=cd853f] +\definecolor [tan4] [h=8b5a2b] +\definecolor [chocolate1] [h=ff7f24] +\definecolor [chocolate2] [h=ee7621] +\definecolor [chocolate3] [h=cd661d] +\definecolor [chocolate4] [h=8b4513] +\definecolor [firebrick1] [h=ff3030] +\definecolor [firebrick2] [h=ee2c2c] +\definecolor [firebrick3] [h=cd2626] +\definecolor [firebrick4] [h=8b1a1a] +\definecolor [brown1] [h=ff4040] +\definecolor [brown2] [h=ee3b3b] +\definecolor [brown3] [h=cd3333] +\definecolor [brown4] [h=8b2323] +\definecolor [salmon1] [h=ff8c69] +\definecolor [salmon2] [h=ee8262] +\definecolor [salmon3] [h=cd7054] +\definecolor [salmon4] [h=8b4c39] +\definecolor [lightsalmon1] [h=ffa07a] +\definecolor [lightsalmon2] [h=ee9572] +\definecolor [lightsalmon3] [h=cd8162] +\definecolor [lightsalmon4] [h=8b5742] +\definecolor [orange1] [h=ffa500] +\definecolor [orange2] [h=ee9a00] +\definecolor [orange3] [h=cd8500] +\definecolor [orange4] [h=8b5a00] +\definecolor [darkorange1] [h=ff7f00] +\definecolor [darkorange2] [h=ee7600] +\definecolor [darkorange3] [h=cd6600] +\definecolor [darkorange4] [h=8b4500] +\definecolor [coral1] [h=ff7256] +\definecolor [coral2] [h=ee6a50] +\definecolor [coral3] [h=cd5b45] +\definecolor [coral4] [h=8b3e2f] +\definecolor [tomato1] [h=ff6347] +\definecolor [tomato2] [h=ee5c42] +\definecolor [tomato3] [h=cd4f39] +\definecolor [tomato4] [h=8b3626] +\definecolor [orangered1] [h=ff4500] +\definecolor [orangered2] [h=ee4000] +\definecolor [orangered3] [h=cd3700] +\definecolor [orangered4] [h=8b2500] +\definecolor [red1] [h=ff0000] +\definecolor [red2] [h=ee0000] +\definecolor [red3] [h=cd0000] +\definecolor [red4] [h=8b0000] +\definecolor [debianred] [h=d70751] +\definecolor [deeppink1] [h=ff1493] +\definecolor [deeppink2] [h=ee1289] +\definecolor [deeppink3] [h=cd1076] +\definecolor [deeppink4] [h=8b0a50] +\definecolor [hotpink1] [h=ff6eb4] +\definecolor [hotpink2] [h=ee6aa7] +\definecolor [hotpink3] [h=cd6090] +\definecolor [hotpink4] [h=8b3a62] +\definecolor [pink1] [h=ffb5c5] +\definecolor [pink2] [h=eea9b8] +\definecolor [pink3] [h=cd919e] +\definecolor [pink4] [h=8b636c] +\definecolor [lightpink1] [h=ffaeb9] +\definecolor [lightpink2] [h=eea2ad] +\definecolor [lightpink3] [h=cd8c95] +\definecolor [lightpink4] [h=8b5f65] +\definecolor [palevioletred1] [h=ff82ab] +\definecolor [palevioletred2] [h=ee799f] +\definecolor [palevioletred3] [h=cd6889] +\definecolor [palevioletred4] [h=8b475d] +\definecolor [maroon1] [h=ff34b3] +\definecolor [maroon2] [h=ee30a7] +\definecolor [maroon3] [h=cd2990] +\definecolor [maroon4] [h=8b1c62] +\definecolor [violetred1] [h=ff3e96] +\definecolor [violetred2] [h=ee3a8c] +\definecolor [violetred3] [h=cd3278] +\definecolor [violetred4] [h=8b2252] +\definecolor [magenta1] [h=ff00ff] +\definecolor [magenta2] [h=ee00ee] +\definecolor [magenta3] [h=cd00cd] +\definecolor [magenta4] [h=8b008b] +\definecolor [orchid1] [h=ff83fa] +\definecolor [orchid2] [h=ee7ae9] +\definecolor [orchid3] [h=cd69c9] +\definecolor [orchid4] [h=8b4789] +\definecolor [plum1] [h=ffbbff] +\definecolor [plum2] [h=eeaeee] +\definecolor [plum3] [h=cd96cd] +\definecolor [plum4] [h=8b668b] +\definecolor [mediumorchid1] [h=e066ff] +\definecolor [mediumorchid2] [h=d15fee] +\definecolor [mediumorchid3] [h=b452cd] +\definecolor [mediumorchid4] [h=7a378b] +\definecolor [darkorchid1] [h=bf3eff] +\definecolor [darkorchid2] [h=b23aee] +\definecolor 
[darkorchid3] [h=9a32cd] +\definecolor [darkorchid4] [h=68228b] +\definecolor [purple1] [h=9b30ff] +\definecolor [purple2] [h=912cee] +\definecolor [purple3] [h=7d26cd] +\definecolor [purple4] [h=551a8b] +\definecolor [mediumpurple1] [h=ab82ff] +\definecolor [mediumpurple2] [h=9f79ee] +\definecolor [mediumpurple3] [h=8968cd] +\definecolor [mediumpurple4] [h=5d478b] +\definecolor [thistle1] [h=ffe1ff] +\definecolor [thistle2] [h=eed2ee] +\definecolor [thistle3] [h=cdb5cd] +\definecolor [thistle4] [h=8b7b8b] +\definecolor [gray0] [s=0.00] +\definecolor [grey0] [gray0] +\definecolor [gray1] [s=0.01] +\definecolor [grey1] [gray1] +\definecolor [gray2] [s=0.02] +\definecolor [grey2] [gray2] +\definecolor [gray3] [s=0.03] +\definecolor [grey3] [gray3] +\definecolor [gray4] [s=0.04] +\definecolor [grey4] [gray4] +\definecolor [gray5] [s=0.05] +\definecolor [grey5] [gray5] +\definecolor [gray6] [s=0.06] +\definecolor [grey6] [gray6] +\definecolor [gray7] [s=0.07] +\definecolor [grey7] [gray7] +\definecolor [gray8] [s=0.08] +\definecolor [grey8] [gray8] +\definecolor [gray9] [s=0.09] +\definecolor [grey9] [gray9] +\definecolor [gray10] [s=0.10] +\definecolor [grey10] [gray10] +\definecolor [gray11] [s=0.11] +\definecolor [grey11] [gray11] +\definecolor [gray12] [s=0.12] +\definecolor [grey12] [gray12] +\definecolor [gray13] [s=0.13] +\definecolor [grey13] [gray13] +\definecolor [gray14] [s=0.14] +\definecolor [grey14] [gray14] +\definecolor [gray15] [s=0.15] +\definecolor [grey15] [gray15] +\definecolor [gray16] [s=0.16] +\definecolor [grey16] [gray16] +\definecolor [gray17] [s=0.17] +\definecolor [grey17] [gray17] +\definecolor [gray18] [s=0.18] +\definecolor [grey18] [gray18] +\definecolor [gray19] [s=0.19] +\definecolor [grey19] [gray19] +\definecolor [gray20] [s=0.20] +\definecolor [grey20] [gray20] +\definecolor [gray21] [s=0.21] +\definecolor [grey21] [gray21] +\definecolor [gray22] [s=0.22] +\definecolor [grey22] [gray22] +\definecolor [gray23] [s=0.23] +\definecolor [grey23] [gray23] +\definecolor [gray24] [s=0.24] +\definecolor [grey24] [gray24] +\definecolor [gray25] [s=0.25] +\definecolor [grey25] [gray25] +\definecolor [gray26] [s=0.26] +\definecolor [grey26] [gray26] +\definecolor [gray27] [s=0.27] +\definecolor [grey27] [gray27] +\definecolor [gray28] [s=0.28] +\definecolor [grey28] [gray28] +\definecolor [gray29] [s=0.29] +\definecolor [grey29] [gray29] +\definecolor [gray30] [s=0.30] +\definecolor [grey30] [gray30] +\definecolor [gray31] [s=0.31] +\definecolor [grey31] [gray31] +\definecolor [gray32] [s=0.32] +\definecolor [grey32] [gray32] +\definecolor [gray33] [s=0.33] +\definecolor [grey33] [gray33] +\definecolor [gray34] [s=0.34] +\definecolor [grey34] [gray34] +\definecolor [gray35] [s=0.35] +\definecolor [grey35] [gray35] +\definecolor [gray36] [s=0.36] +\definecolor [grey36] [gray36] +\definecolor [gray37] [s=0.37] +\definecolor [grey37] [gray37] +\definecolor [gray38] [s=0.38] +\definecolor [grey38] [gray38] +\definecolor [gray39] [s=0.39] +\definecolor [grey39] [gray39] +\definecolor [gray40] [s=0.40] +\definecolor [grey40] [gray40] +\definecolor [gray41] [s=0.41] +\definecolor [grey41] [gray41] +\definecolor [gray42] [s=0.42] +\definecolor [grey42] [gray42] +\definecolor [gray43] [s=0.43] +\definecolor [grey43] [gray43] +\definecolor [gray44] [s=0.44] +\definecolor [grey44] [gray44] +\definecolor [gray45] [s=0.45] +\definecolor [grey45] [gray45] +\definecolor [gray46] [s=0.46] +\definecolor [grey46] [gray46] +\definecolor [gray47] [s=0.47] +\definecolor [grey47] [gray47] 
+\definecolor [gray48] [s=0.48] +\definecolor [grey48] [gray48] +\definecolor [gray49] [s=0.49] +\definecolor [grey49] [gray49] +\definecolor [gray50] [s=0.50] +\definecolor [grey50] [gray50] +\definecolor [gray51] [s=0.51] +\definecolor [grey51] [gray51] +\definecolor [gray52] [s=0.52] +\definecolor [grey52] [gray52] +\definecolor [gray53] [s=0.53] +\definecolor [grey53] [gray53] +\definecolor [gray54] [s=0.54] +\definecolor [grey54] [gray54] +\definecolor [gray55] [s=0.55] +\definecolor [grey55] [gray55] +\definecolor [gray56] [s=0.56] +\definecolor [grey56] [gray56] +\definecolor [gray57] [s=0.57] +\definecolor [grey57] [gray57] +\definecolor [gray58] [s=0.58] +\definecolor [grey58] [gray58] +\definecolor [gray59] [s=0.59] +\definecolor [grey59] [gray59] +\definecolor [gray60] [s=0.60] +\definecolor [grey60] [gray60] +\definecolor [gray61] [s=0.61] +\definecolor [grey61] [gray61] +\definecolor [gray62] [s=0.62] +\definecolor [grey62] [gray62] +\definecolor [gray63] [s=0.63] +\definecolor [grey63] [gray63] +\definecolor [gray64] [s=0.64] +\definecolor [grey64] [gray64] +\definecolor [gray65] [s=0.65] +\definecolor [grey65] [gray65] +\definecolor [gray66] [s=0.66] +\definecolor [grey66] [gray66] +\definecolor [gray67] [s=0.67] +\definecolor [grey67] [gray67] +\definecolor [gray68] [s=0.68] +\definecolor [grey68] [gray68] +\definecolor [gray69] [s=0.69] +\definecolor [grey69] [gray69] +\definecolor [gray70] [s=0.70] +\definecolor [grey70] [gray70] +\definecolor [gray71] [s=0.71] +\definecolor [grey71] [gray71] +\definecolor [gray72] [s=0.72] +\definecolor [grey72] [gray72] +\definecolor [gray73] [s=0.73] +\definecolor [grey73] [gray73] +\definecolor [gray74] [s=0.74] +\definecolor [grey74] [gray74] +\definecolor [gray75] [s=0.75] +\definecolor [grey75] [gray75] +\definecolor [gray76] [s=0.76] +\definecolor [grey76] [gray76] +\definecolor [gray77] [s=0.77] +\definecolor [grey77] [gray77] +\definecolor [gray78] [s=0.78] +\definecolor [grey78] [gray78] +\definecolor [gray79] [s=0.79] +\definecolor [grey79] [gray79] +\definecolor [gray80] [s=0.80] +\definecolor [grey80] [gray80] +\definecolor [gray81] [s=0.81] +\definecolor [grey81] [gray81] +\definecolor [gray82] [s=0.82] +\definecolor [grey82] [gray82] +\definecolor [gray83] [s=0.83] +\definecolor [grey83] [gray83] +\definecolor [gray84] [s=0.84] +\definecolor [grey84] [gray84] +\definecolor [gray85] [s=0.85] +\definecolor [grey85] [gray85] +\definecolor [gray86] [s=0.86] +\definecolor [grey86] [gray86] +\definecolor [gray87] [s=0.87] +\definecolor [grey87] [gray87] +\definecolor [gray88] [s=0.88] +\definecolor [grey88] [gray88] +\definecolor [gray89] [s=0.89] +\definecolor [grey89] [gray89] +\definecolor [gray90] [s=0.90] +\definecolor [grey90] [gray90] +\definecolor [gray91] [s=0.91] +\definecolor [grey91] [gray91] +\definecolor [gray92] [s=0.92] +\definecolor [grey92] [gray92] +\definecolor [gray93] [s=0.93] +\definecolor [grey93] [gray93] +\definecolor [gray94] [s=0.94] +\definecolor [grey94] [gray94] +\definecolor [gray95] [s=0.95] +\definecolor [grey95] [gray95] +\definecolor [gray96] [s=0.96] +\definecolor [grey96] [gray96] +\definecolor [gray97] [s=0.97] +\definecolor [grey97] [gray97] +\definecolor [gray98] [s=0.98] +\definecolor [grey98] [gray98] +\definecolor [gray99] [s=0.99] +\definecolor [grey99] [gray99] +\definecolor [gray100] [s=1.00] +\definecolor [grey100] [gray100] +\definecolor [darkgrey] [s=0.66] +\definecolor [darkgray] [darkgrey] +\definecolor [darkblue] [h=00008b] +\definecolor [darkcyan] [h=008b8b] +\definecolor 
[darkmagenta] [h=8b008b] +\definecolor [darkred] [h=8b0000] +\definecolor [lightgreen] [h=90ee90] + +\endinput diff --git a/Master/texmf-dist/tex/context/base/cont-cz.tex b/Master/texmf-dist/tex/context/base/cont-cz.tex deleted file mode 100644 index b86b21e869a..00000000000 --- a/Master/texmf-dist/tex/context/base/cont-cz.tex +++ /dev/null @@ -1,35 +0,0 @@ -%D \module -%D [ file=cont-cz, -%D version=1998.12.02, -%D title=\CONTEXT, -%D subtitle=\CONTEXT\ Czech Format Generation, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA / Hans Hagen \& Ton Otten}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\catcode`\{=1 \catcode`\}=2 \def\defaultinterface{czech} - -\input context.tex - -\unprotect - -\setupcurrentlanguage[\s!cz] - -\loaduserspecifications - -\installlanguage [\s!en] [\c!state=\v!start] -\installlanguage [\s!de] [\c!state=\v!start] -\installlanguage [\s!sk] [\c!state=\v!start] -\installlanguage [\s!cz] [\c!state=\v!start] - -% \setupbodyfont [cmr,ams,rm,12pt] -% -% \setupencoding[default=ec] \usetypescript[modern][\defaultencoding] \setupbodyfont[modern,rm,12pt] - -\setupencoding[default=ec] \usetypescript[fallback][\defaultencoding] \setupbodyfont[rm,12pt] - -\protect \errorstopmode \dump \endinput diff --git a/Master/texmf-dist/tex/context/base/cont-fil.tex b/Master/texmf-dist/tex/context/base/cont-fil.tex index 28b6b6f55b8..b295872ca0c 100644 --- a/Master/texmf-dist/tex/context/base/cont-fil.tex +++ b/Master/texmf-dist/tex/context/base/cont-fil.tex @@ -20,7 +20,8 @@ \definefilesynonym [eenheid] [units] \definefilesynonym [einheit] [units] -\definefilesynonym [pstricks] [pstric] +\definefilesynonym [pstric] [pstricks] +\definefilesynonym [pstrick] [pstricks] \definefilesynonym [finance] [financ] diff --git a/Master/texmf-dist/tex/context/base/cont-log.tex b/Master/texmf-dist/tex/context/base/cont-log.tex index a22c1d2c4e2..9bfec299992 100644 --- a/Master/texmf-dist/tex/context/base/cont-log.tex +++ b/Master/texmf-dist/tex/context/base/cont-log.tex @@ -89,7 +89,7 @@ \kern-.11em\TeX} \def\AMSswitch#1% - {$\fam2\ifdim\bodyfontsize>1.1em\scriptstyle\fi#1$} + {$\cal\ifdim\bodyfontsize>1.1em\scriptstyle\fi#1$} \unexpanded\def\AmSTeX {\AMSswitch A% @@ -210,6 +210,7 @@ \unexpanded\def\TABLE {\TaBlE} \unexpanded\def\AMSTEX {\AmSTeX} \unexpanded\def\LAMSTEX {\LamSTeX} +\unexpanded\def\INRSTEX {inrs\TeX} %D And this is how they show up: \TeX, \MetaFont, \MetaPost, %D \PiCTeX, \TaBlE, \ConTeXt, \PPCHTeX, \AmSTeX, \LaTeX, @@ -286,14 +287,15 @@ \fi -\let\ETEX \eTeX -\let\PDFTEX \pdfTeX -\let\PDFETEX \pdfeTeX -\let\LUATEX \luaTeX -\let\XETEX \XeTeX +\let\ETEX \eTeX +\let\PDFTEX \pdfTeX +\let\PDFETEX\pdfeTeX +\let\LUATEX \luaTeX +\let\LuaTeX \luaTeX +\let\XETEX \XeTeX -\def\MkApproved - {\rotate +\unexpanded\def\MkApproved + {\dontleavehmode\rotate [\c!rotation={\ifnum\texengine=\luatexengine\ctxlua{tex.write(45-45*\the\luatexversion/100)}\else0\fi}, \c!align=\v!middle, \c!foregroundstyle=\v!type, @@ -306,4 +308,52 @@ \c!rulethickness=2pt] {Mk\ifnum\texengine=\luatexengine IV\else II\fi\\approved}} + +% \unexpanded\def\luaTeX +% {\dontleavehmode\begingroup +% Lua% +% \setbox0\hbox{oT}% +% \setbox2\hbox{o\kern0ptT}% +% \ifdim\wd0=\wd2 +% \setbox0\hbox dir TRT{To}% +% \setbox2\hbox{T\kern0pto}% +% \hskip\dimexpr\wd0-\wd2\relax +% \fi +% \TeX +% \endgroup} +% +% a further iteration from the list, patched again + +% \ifx\fontalternative\c!it -\else +% \ifx\fontalternative\c!sl -\else +% 
\ifx\fontalternative\c!bi -\else +% \ifx\fontalternative\c!bs -\fi\fi\fi\fi + +\def\LuaTeX + {\dontleavehmode + \begingroup + Lua% + % hope for kerning, try aT + \setbox0\hbox{aT}% + \setbox2\hbox{a\kern\zeropoint T}% + \ifdim\wd0=\wd2 % kerns can go two ways + % no aT kerning, try oT as a is not symmetrical + \setbox0\hbox{oT}% + \setbox2\hbox{o\kern\zeropoint T}% + \ifdim\wd0=\wd2 % kerns can go two ways + % no aT and oT kerning, try To + \setbox0\hbox{To}% + \setbox2\hbox{T\kern\zeropoint o}% + % maybe we need to compensate for the angle (sl/it/bs/bi) + \fi + \ifdim\wd0=\wd2\else + \kern\dimexpr\wd0-\wd2\relax + \fi + \fi + \TeX + \endgroup} + +\let\luaTeX \LuaTeX +\let\LUATEX \LuaTeX + \protect \endinput diff --git a/Master/texmf-dist/tex/context/base/cont-new.mkii b/Master/texmf-dist/tex/context/base/cont-new.mkii index 8b5bed657a5..2defba6d787 100644 --- a/Master/texmf-dist/tex/context/base/cont-new.mkii +++ b/Master/texmf-dist/tex/context/base/cont-new.mkii @@ -16,4 +16,7 @@ \long\def\startluacode#1\stopluacode{} \long\def\ctxlua #1{} +\def\enabletrackers [#1]{} +\def\disabletrackers[#1]{} + \endinput diff --git a/Master/texmf-dist/tex/context/base/cont-new.mkiv b/Master/texmf-dist/tex/context/base/cont-new.mkiv index 35b716ed46f..6269e5a6160 100644 --- a/Master/texmf-dist/tex/context/base/cont-new.mkiv +++ b/Master/texmf-dist/tex/context/base/cont-new.mkiv @@ -11,25 +11,15 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -% 2009-07-15 / vista sp 2 / 2.2G duo dell M90 precision: -% -% \dorecurse{10000}{test \page} % 300 pps -% \dorecurse {2000}{test \page} % 260 pps -% \dorecurse {300}{test \page} % 135 pps - -% \ctxlua { fonts.define.method = 2 } % normally 3 - -\enablemode[mkiv] \setsystemmode{mkiv} - % potential new defaults: % % \setbreakpoints[compound] \unprotect -% % % % % % needs testing but saves runtime +\ctxlua{logs.report = commands.report} % this will become default -\let\checknotes\relax % probably not needed, checking already done +\def\immediatemessage#1{\ctxlua{commands.writestatus("message","#1")}} % we need to figure this out (to be discussed) @@ -43,106 +33,8 @@ \unexpanded\def\textplus {\char"002B } % plus -% \def\registerviewerlayer#1#2% global ! -% {\setxvalue{(vl:#1)}{\global\dosetattribute{viewerlayer}{\ctxlua{tex.print(viewerlayers.register('#2'))}}}} - -% \setevalue{(vl:)}{\global\doresetattribute{viewerlayer}} - -\let\\=\crlf % till we fixed all styles - -% \def\pagedir{\expandafter\gobblethreearguments} -% \def\bodydir{\expandafter\gobblethreearguments} - -%D Since this can be a showstopper, we report the path at the beginning -%D as well as at the end of a run. - -% \writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath())}} -% \writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}} - -\startluacode - statistics.register("result saved in file", function() - return string.format( "%s.%s", "\outputfilename", (tex.pdfoutput>0 and "pdf") or "dvi") - end) -\stopluacode - -%D For the moment we report some statistics. Later this will become an option, -%D but for now we need this information. 
- -\def\nomkivstatistics{\ctxlua{statistics.enable = false}} % for taco - -\def\resettimer {\ctxlua{environment.starttime = os.clock()}} -\def\elapsedtime {\ctxlua{tex.sprint(os.clock()-environment.starttime)}} -\let\elapsedseconds \elapsedtime +% till we fixed all styles -% we will have a bunch of extra tracers (--dumphash --dumpdelta) - -\def\tracersdumphash {\ctxlua{tracers.register_dump_hash(false)}} -\def\tracersdumpdelta{\ctxlua{tracers.register_dump_hash(true)}} - -\resettimer - -%D For me. - -\def\traceluausage - {\dosingleempty\dotraceluausage} - -\def\dotraceluausage[#1]% - {\ctxlua{debugger.enable()}% - \appendtoks\ctxlua{debugger.disable() debugger.showstats(print,\doifnumberelse{#1}{#1}{5000})}\to\everybye} - -%D Fonts (experimental AFM loading} - -% \ctxlua { -% remapper.define('encoding','^lm' ,'^(.*)$','lm-\letterpercent1') -% remapper.define('encoding','^qbk','^(.*)$','q-\letterpercent1') -% remapper.define('encoding','^qcs','^(.*)$','q-\letterpercent1') -% remapper.define('encoding','^qpl','^(.*)$','q-\letterpercent1') -% remapper.define('encoding','^qtm','^(.*)$','q-\letterpercent1') -% } - -% \appendtoksonce \loadallXfontmapfiles \to \pdfbackendeveryximage -% \appendtoksonce \loadallXfontmapfiles \to \pdfbackendeveryxform -% \appendtoksonce \loadallXfontmapfiles \to \everystarttext -% \appendtoksonce \loadallXfontmapfiles \to \everybeforepagebody - -% \def\loadallXfontmapfiles{\ctxlua{fonts.map.flush("pdftex")}} - -% \ctxlua{ -% do -% local pth = "." .. io.fileseparator .. "tmp" .. io.fileseparator .. "\jobname" -% texio.write_nl("CREATING "..pth) -% os.execute("mkdir " .. pth) -% end -% resolvers.output_files = { } -% callback.register('find_write_file', function(id,name) -% resolvers.output_files[name] = file.join(".","tmp","\jobname",name) -% texio.write_nl("REDIRECTING OUTPUT "..name.. " TO " .. resolvers.output_files[name]) -% return resolvers.output_files[name] -% end) -% callback.register('find_read_file', function(id,name) -% local sname = string.gsub(name,"^\letterpercent./","") -% if resolvers.output_files[sname] then -% return resolvers.output_files[name] -% elseif string.find(sname,"^\jobname[\letterpercent.\letterpercent-]") then -% local n = file.join(".","tmp","\jobname",sname) -% local f = io.open(n) -% if f then -% resolvers.output_files[name] = n -% texio.write_nl("REDIRECTING INPUT "..sname.. " TO " .. n) -% f:close() -% return n -% else -% return resolvers.findtexfile(name) -% end -% else -% return resolvers.findtexfile(name) -% end -% end) -% } - -\definestartstop[randomized][\c!before=\dosetattribute{case}{8},\c!after=] +\let\\=\crlf \protect \endinput - -% \expanded{\defineactivecharacter \number"2000E} {\textdir TRT\relax} -% \expanded{\defineactivecharacter \number"2000F} {\textdir TLT\relax} diff --git a/Master/texmf-dist/tex/context/base/cont-new.tex b/Master/texmf-dist/tex/context/base/cont-new.tex index 161d4c9182e..9c4fdba18f9 100644 --- a/Master/texmf-dist/tex/context/base/cont-new.tex +++ b/Master/texmf-dist/tex/context/base/cont-new.tex @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
-\newcontextversion{2009.08.19 17:10} +\newcontextversion{2010.05.24 13:05} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/Master/texmf-dist/tex/context/base/cont-uk.tex b/Master/texmf-dist/tex/context/base/cont-uk.tex deleted file mode 100644 index d900217e1dc..00000000000 --- a/Master/texmf-dist/tex/context/base/cont-uk.tex +++ /dev/null @@ -1,38 +0,0 @@ -%D \module -%D [ file=cont-uk, -%D version=1997.08.19, -%D title=\CONTEXT, -%D subtitle=\CONTEXT\ English Format Generation, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright={PRAGMA / Hans Hagen \& Ton Otten}] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. - -\catcode`\{=1 \catcode`\}=2 \def\defaultinterface{english} - -\input context.tex - -\unprotect - -\setupcurrentlanguage[\s!uk] - -\loaduserspecifications - -\installlanguage [\s!en] [\c!state=\v!start] -\installlanguage [\s!uk] [\c!state=\v!start] -\installlanguage [\s!de] [\c!state=\v!start] -\installlanguage [\s!fr] [\c!state=\v!start] -\installlanguage [\s!es] [\c!state=\v!start] -\installlanguage [\s!nl] [\c!state=\v!start] -\installlanguage [\s!it] [\c!state=\v!start] - -% \setupbodyfont [cmr,ams,rm,12pt] -% -% \setupencoding[default=ec] \usetypescript[modern][\defaultencoding] \setupbodyfont[modern,rm,12pt] - -\setupencoding[default=ec] \usetypescript[fallback][\defaultencoding] \setupbodyfont[rm,12pt] - -\protect \errorstopmode \dump \endinput diff --git a/Master/texmf-dist/tex/context/base/context-base.lmx b/Master/texmf-dist/tex/context/base/context-base.lmx index 5c96b497953..fd27927bf82 100644 --- a/Master/texmf-dist/tex/context/base/context-base.lmx +++ b/Master/texmf-dist/tex/context/base/context-base.lmx @@ -14,25 +14,63 @@ <?lua pv('title') ?> + 0) and v('refreshurl') then ?> + + + + + + +
+ diff --git a/Master/texmf-dist/tex/context/base/context-characters.lmx b/Master/texmf-dist/tex/context/base/context-characters.lmx index b2ddee64e83..f018e412ba9 100644 --- a/Master/texmf-dist/tex/context/base/context-characters.lmx +++ b/Master/texmf-dist/tex/context/base/context-characters.lmx @@ -12,7 +12,7 @@ -